+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --add-peerdirs-tests all --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -A --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.H3Psnf4nEZ --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest ydb --build-custom-json=/home/runner/actions_runner/_work/ydb/ydb/graph.json --custom-context=/home/runner/actions_runner/_work/ydb/ydb/context.json --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 0.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/rate_limiter/ut/unittest | 0.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} | 1.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} | 1.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut | 1.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} | 1.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a | 1.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a | 2.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/view/tests/py3test | 2.7%| PREPARE $(VCS) | 2.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_range_ops/unittest | 3.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 3.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/pq_async_io/ut/unittest | 3.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceproxy/ut/unittest | 3.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/server_restart/gtest | 3.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 4.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} | 4.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} | 4.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/ctas | 5.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} | 5.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 5.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} | 5.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} | 5.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ...
results_accumulator.log} | 5.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 5.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} | 6.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} | 6.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_stats/unittest | 6.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 6.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} | 6.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 6.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/sql/py3test | 6.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 6.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 6.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} | 7.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} | 7.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} | 7.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 7.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} | 7.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 7.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel_unstable/unittest | 8.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 8.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 8.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test | 8.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_testshard/unittest | 8.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a | 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a | 9.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/pq_read/test/py3test | 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a | 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a | 9.2%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a | 9.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |10.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |10.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |10.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |10.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |10.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |10.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |10.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp 
|10.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |11.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |11.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |11.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |11.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |11.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |11.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |11.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |11.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |11.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |11.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |11.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |11.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |11.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |11.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |12.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |12.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |12.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a |12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |12.9%| PREPARE $(YMAKE_PYTHON3) - 8.40 MB |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |13.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |13.2%| PREPARE $(BLACK-1616416287) - 8.40 MB |13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |13.5%| PREPARE $(RUFF-3583455953) - 8.40 MB |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |13.9%| PREPARE $(TEST_TOOL_HOST-sbr:10669990147) - 25.18 MB |14.0%| PREPARE $(CLANG_FORMAT-3815817643) - 7.58 KB |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |14.0%| PREPARE $(PYTHON) - 50.35 MB |14.1%| PREPARE $(LLD_ROOT-3107549726) - 31.71 MB |14.2%| PREPARE $(YMAKE_PYTHON3-212672652) |14.2%| PREPARE $(FLAKE8_PY3-3596799299) - 8.40 MB |14.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-sqs/liblibs-aws-sdk-cpp-aws-cpp-sdk-sqs.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |14.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a 
|14.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |14.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |18.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |18.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |18.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |18.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |18.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |18.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |18.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |18.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |18.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |18.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |18.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |18.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |19.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |19.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |19.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |19.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |19.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |19.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |19.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |19.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |19.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |20.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |20.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |20.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |20.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |20.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |20.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |20.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/control/ut/unittest |20.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |20.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |20.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |21.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |21.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |21.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |21.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |21.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |21.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |21.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |21.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |22.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |22.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |22.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |22.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |22.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |22.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |23.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |23.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |23.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |23.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |23.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |23.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |23.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/show_create_table |23.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |23.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |24.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |24.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |24.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |24.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |24.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |24.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |24.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |24.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/token_writer.cpp |24.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |24.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |24.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |24.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |24.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |24.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/transfer |24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |25.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |25.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |25.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |25.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |25.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |25.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |25.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |25.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |25.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |25.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |25.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |25.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |25.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |25.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |25.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |25.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |26.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |26.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |26.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |26.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |26.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |26.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |26.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |26.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |26.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |26.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |26.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |26.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |26.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |26.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |26.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |26.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |27.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |27.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |27.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |27.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytalloc/bindings.cpp |27.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |27.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |27.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |27.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |27.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |28.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |28.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |28.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |28.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |28.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |28.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |28.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |28.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |28.5%| PREPARE $(CLANG-1922233694) - 218.12 MB |28.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |28.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |29.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |29.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |29.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |29.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |29.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |29.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |29.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |29.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |29.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |29.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |29.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |29.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |29.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |29.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |29.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |29.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |30.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |30.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |30.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |30.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |30.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |30.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/bus/tcp/client.cpp |30.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |30.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |30.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |30.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_partitions.cpp |31.0%| PREPARE $(CLANG16-1380963495) - 289.92 MB |31.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |31.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |31.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |31.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |31.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |31.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |31.8%| PREPARE $(CLANG18-1866954364) - 310.41 MB |31.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |32.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |31.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |32.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_create.cpp |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |32.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |32.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |32.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |32.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |32.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |32.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |32.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |32.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |32.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |32.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |32.7%| PREPARE $(CLANG20-882982600) - 285.25 MB |32.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |32.8%| PREPARE $(CLANG-2403293607) |32.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |32.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |32.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |33.3%| PREPARE $(JDK_DEFAULT-2548586558) - 179.92 MB |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |33.4%| PREPARE $(JDK17-2548586558) |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_layers_integration.cpp |33.4%| PREPARE $(WITH_JDK-sbr:9470949154) |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |33.6%| PREPARE $(WITH_JDK17-sbr:9470949154) |33.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |33.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |33.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |33.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |33.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |33.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |34.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |34.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |34.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |34.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |34.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |34.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |34.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |34.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |34.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |35.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp 
|35.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |34.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |35.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |35.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |35.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |35.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |35.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |35.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |35.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |35.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |35.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |36.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |36.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |36.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |36.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |35.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |36.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |36.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |36.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |36.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |36.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |36.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |36.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |36.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/shutdown.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |37.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |38.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/phoenix/load.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_priority_provider.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |40.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |41.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/sasl/liblibrary-login-sasl.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |42.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |43.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/protos/receipt/libsqs_topic-protos-receipt.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/holder/libsqs_topic-queue_url-holder.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |44.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |45.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/find_root/libpy3library-python-find_root.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |46.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |47.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |47.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |47.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |48.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/counters_info/libydb-core-counters_info.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a 
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |50.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/regexp/libcore-backup-regexp.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |51.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a 
|52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |52.6%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |52.5%| [CP] {default-linux-x86_64, release, asan} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |52.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |52.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |52.7%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan/libclang_rt.asan-x86_64.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |52.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/printer/libformats-arrow-printer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/libcommands-interactive-ai.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/tools/libinteractive-ai-tools.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/models/libinteractive-ai-models.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/sqs_json/libsqs_json.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/common/libcommands-interactive-common.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/session/libcommands-interactive-session.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/libsqs_workload.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ibdrv/libcontrib-libs-ibdrv.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |53.4%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_config.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |53.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |53.7%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |53.8%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_token_resolver/libyt-lib-yt_token_resolver.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/temp_files/libyt-lib-temp_files.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a 
|53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |53.8%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_file_download/libyt-lib-yt_file_download.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |54.0%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/control.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_row_builder.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |53.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |54.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |54.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/cluster_directory.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/security_client.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp 
|54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/dynamic.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/provided.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/constrained_schema.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/wire_protocol.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |55.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |55.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/snowball/libcontrib-libs-snowball.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/cow_string/libcpp-containers-cow_string.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |55.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/libbackup-import-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a 
|56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |56.0%| PREPARE $(GDB) - 18.23 MB |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |56.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |56.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |56.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |56.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a 
|56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_helpers.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_linear.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |57.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_narrow_map.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mutdict.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |57.8%| [CC] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_expand_map.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |58.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |58.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |58.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/cq_actor/libinterconnect-rdma-cq_actor.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |58.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |58.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/hashes_checker/liblibrary-login-hashes_checker.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |58.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |59.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/attributes/libcore-ymq-attributes.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |59.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/fulltext/libfulltext_udf.global.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload_kv |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |59.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |59.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/libpy3show_create_table.global.a |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/libpy3ctas.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.0%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_c0a0299090f532c29f2ee6dc87.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/objcopy_970514ee5aa7605a49b54b8feb.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/olap/scenario/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/workload/libpy3show_create-table-workload.global.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload_topic |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/objcopy_3cb499a0fcc9aa014af2855233.o |60.1%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/libpy3workload_kv.global.a |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/objcopy_691607a9cbabb8d8c0161d1a6d.o |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |60.2%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/backup_stress |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_export/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_4d329553dae503930cad7a3eec.o |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/workload/libpy3stress-streaming-workload.global.a |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_6dc907e08e2d6bd66a6c998081.o |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_f022524c80d73c5ff5d59211dd.o |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydbd/export/libapps-ydbd-export.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/statistics_internal/libstatistics_internal_udf.global.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/objcopy_e9d954952def1b899e1fb63731.o |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/libpy3workload_topic.global.a |60.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |60.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/workload/libpy3stress-backup-workload.global.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/interface/libfile-upload-interface.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/impl/libfile-upload-impl.a |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/objcopy_06a563d04a0e897145ef1061d2.o |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/interface/libfile-metadata-interface.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o 
|60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/libpy3workload_testshard.global.a |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/libpy3backup_stress.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/impl/libfile-metadata-impl.a |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/objcopy_4f24d349611ecfcf19e3001adb.o |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_preparer/interface/libfmr-job_preparer-interface.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |60.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_preparer/impl/libfmr-job_preparer-impl.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |60.9%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/streaming |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/show_create_view |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |61.3%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/mind/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/libpy3workload_mixed.global.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/objcopy_b632f28ee823f938d14c0e85f9.o |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/libpy3transfer.global.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/rbo/objcopy_8a4839ce59e28719331c9dfe7e.o |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/objcopy_1ac33161075322a9a0efb211c2.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/libpy3streaming.global.a |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |61.5%| [TA] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/auth/gtest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/workload/libpy3stress-ctas-workload.global.a |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/workload/libpy3stress-viewer-workload.global.a |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |61.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e508a8abac843a0a0f92fc62eb.o |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/solomon/py3test |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/http/ut/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_8e0f2cf91b35e6051ad82535a4.o |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e55498abceca534315a6428452.o |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/time_cast/ut/unittest |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |59.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/view/unittest |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/testshard_workload/tests/py3test |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_1815f02732d96389c328f04d90.o |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_f0d8fb718a757998dc9403df32.o |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_df04396057094f2483296b9dbe.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/objcopy_9ccdc4f01b578a43bc35d4d519.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/generic/actors/ut/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/libpy3show_create_view.global.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/example/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/oom/py3test |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |60.0%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/error.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_d3749b17b0bc2057695c3e736a.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_ee633aebbed65e39e09fbf4271.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... 
grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |60.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_priority_provider.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |61.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |61.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_6bdc69403f0fa7c03edbf19c28.o |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut/inside_ydb_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |62.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/result_set_format |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... 
results_accumulator.log} |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/tests/kikimr_tpch/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp 
|62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_incremental_backup/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/workload/type/libpy3result_set_format-workload-type.global.a |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/result_set_format/workload/libpy3stress-result_set_format-workload.global.a |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/objcopy_ab0068aecc73dfe57505e1cc57.o |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/libpy3result_set_format.global.a |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_aac602e1ee9075b2856f1975ca.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_2983697b65fe3fbc437e0acf1f.o |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test 
|62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |62.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_771af70678639da3d434ea1416.o |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kafka/tests/py3test |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic/tests/py3test |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |59.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceshard/ut/unittest |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_tests/py3test |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/backup/tests/py3test |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/column_table/unittest |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/base/ut/unittest |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/kqprun/tests/py3test |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/quota/ut/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... 
grpc.pb.h} |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/auth/ydb-public-sdk-cpp-tests-integration-auth |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp 
|59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |57.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test 
|57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| COMPACTING CACHE 112.0KiB |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/compile_service/ut/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.5%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/scheme_board/ut_cache/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/mind/bscontroller/ut_selfheal/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/tx/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |60.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/streaming/tests/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/list_topics/ut/unittest |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/cdc |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/ut/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/s3_path_style/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/query_actor/ut/unittest |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/memory_controller/ut/unittest |60.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/long_tx_service/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_proxy/ut/unittest |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/olap_workload/tests/py3test |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} 
|60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/graph_params.pb.{h, cc} |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... 
grpc.pb.h} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_8cba80b2275265b72407436cdf.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |61.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |61.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... 
grpc.pb.h} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/objcopy_7d7339f4588397fc771e31030c.o |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_rs/unittest |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/config/ut/unittest |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/limiter/grouped_memory/ut/unittest |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/high_load/unittest |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_sequence/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/simple_queue/tests/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/plan2svg/py3test |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/coordinator/ut/unittest |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/libpy3cdc.global.a |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |61.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/yt/kqp_yt_import/py3test |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_2a98e2f0e66f286cb125620511.o |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/solomon/reading/py3test |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/checkpoint_storage/proto/graph_description.pb.{h, cc} |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... 
grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... 
grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... 
grpc.pb.h} |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_followers/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/transfer/tests/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kv/tests/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/build_index/ut/unittest |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/ut/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_compaction/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/http_api/py3test |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/comp_nodes/ut/unittest |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/objcopy_e0d6c3883613601e455029f31f.o |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/libpy3kafka_streams_test.global.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/library/ut/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/yc_search_ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/fetcher/ut/unittest |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/functional/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_keys/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_background_compaction/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic_kafka/tests/py3test |59.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |60.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp
|60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h}
|60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h}
|60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp
|60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp
|62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg
|62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp
|62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/encryption/py3test |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serializable/py3test |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_session/unittest |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/oltp_workload/tests/py3test |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/replication/unittest |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_init/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/describer/ut/unittest |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/keyvalue/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_svc/unittest |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |62.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |62.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |62.3%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... 
grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_88c3bdc430d7cb7f217b6a7f5c.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_8d4997dfd93843149bbf2f76f1.o |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_281dee3582d4ce59ddf75426c6.o |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/1bbc8f31f8eb46680aacf37392_raw.auxcpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/statistics/py3test |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_benches_ut.cpp |62.6%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/aggregator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_generator.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/mixedpy/tests/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/solomon/actors/ut/unittest |62.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |62.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/gateway/ut/gtest |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/restarts/py3test |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/result_set_format/tests/py3test |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/node_broker |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/wardens/py3test |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/objcopy_2a9fba044b5f98d2ff5f5c7f44.o |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... 
grpc.pb.h} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/secret/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_4e45fac9e6e2cbc502659b10eb.o |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_c19b3eb5266bf8e49b2b628bc2.o |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/statistics/database/ut/ut_database.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/tests/sql/solomon/pytest |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |63.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |63.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/row_table/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_11720a94357f4e62a79e34a420.o |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/ctas/tests/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |62.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... 
results_accumulator.log} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/checkpointing/ut/unittest |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_export/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |62.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/cdc/tests/py3test |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/quoter/ut/unittest |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/tablet/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/postgresql/py3test |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |62.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/public_http/ut/unittest |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/viewer/tests/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/federated_query/ut_service/fast/unittest |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/run_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/libpy3node_broker.global.a |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest 
|62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_rate_limiter.pb.{h, cc} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |62.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... 
grpc.pb.h} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... 
grpc.pb.h} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.9%| PREPARE $(FLAKE8_PY2-2255386470) - 3.39 MB |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| RESOURCE $(sbr:4966407557) |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_kqp_scan/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/mon_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |63.4%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut 
|63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_indexes/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/shard/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_reassign/unittest |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/s3_backups/tests/py3test |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/splitter/ut/unittest |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_disk_quotas/unittest |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/node_broker/tests/py3test |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/mediator/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/objcopy_33061e6ec0580baa7f93f1ce23.o |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/libpy3workload_topic_kafka.global.a |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |62.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |62.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... 
grpc.pb.h} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.{pb.h ... grpc.pb.h} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... 
grpc.pb.h} |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |63.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... 
grpc.pb.h} |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/mind/hive/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |63.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/script_execution/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |63.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/executer_actor/ut/unittest |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/ut/unittest |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/unittest |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/raw_socket/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/table/tests/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/mlp/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/dstool/py3test |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_ab542bc0d25a481768cec1ad31.o |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_2650fc91e1b7439f48496ea51e.o |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |63.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_51c4be7ece033a38cc2a2dfb64.o |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |63.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/common/py3test |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |63.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |63.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |63.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/discovery/unittest |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/test_connection/ut/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_1c931ae3496b883d009f58ef8e.o |63.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_5233daad98e22a16d60b4b7b86.o |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |63.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |63.7%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/data/unittest |63.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... 
grpc.pb.h} |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... 
grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |64.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |64.0%| [TS] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |64.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.8%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_replication/unittest |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_vacuum/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/add_column/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/query_cache/py3test |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/viewer/tests/py3test |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |63.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/minikql_compile/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_column_stats/unittest |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |64.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... 
grpc.pb.h} |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... grpc.pb.h} |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |64.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |64.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} |64.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |65.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |65.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |65.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |66.4%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |69.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |70.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |73.1%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |73.2%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |74.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |75.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/transfer |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/transfer |76.0%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/transfer |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |76.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |76.0%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.0%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |76.0%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |76.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.0%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |76.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |76.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |76.0%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |76.0%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload_topic |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload_topic |76.1%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/workload_topic |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/s3_backups |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/s3_backups |76.1%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.1%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.1%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |76.1%| [LD] {RESULT} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |76.1%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |76.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |76.3%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |76.4%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |76.4%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |76.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.5%| [LD] 
{RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |76.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.3%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload_kv |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload_kv |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |77.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/workload_kv |77.7%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |77.9%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/ctas |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/ctas |78.1%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/ctas |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/show_create_view |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/show_create_view |78.1%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/show_create_view |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/streaming |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/streaming |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/streaming |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/node_broker |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/node_broker |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/node_broker |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/cdc |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/cdc |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/cdc |78.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |78.2%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |78.2%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |78.2%| [LD] {RESULT} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |78.3%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |78.3%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |78.3%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |78.6%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |78.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/library/ut/ydb-tests-library-ut |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |78.8%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/olap_workload |78.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/olap_workload |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |78.9%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |79.0%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.0%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |79.0%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/show_create_table |79.0%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/show_create_table |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/show_create_table |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.1%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |79.1%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |79.2%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.3%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |79.4%| 
[LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.4%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |79.5%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |79.5%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |79.5%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload_mixed |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |79.6%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/workload_mixed |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |79.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/kafka_streams_test |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/kafka_streams_test |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/backup_stress |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/backup_stress |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/backup_stress |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |79.8%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool |79.9%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |79.9%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |79.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |79.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |80.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.0%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.1%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.1%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |80.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.2%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.3%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.4%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |80.5%| [LD] {RESULT} 
$(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.5%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.6%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.6%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.6%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.6%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.6%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.6%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.6%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.7%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp >> FormatTimes::DurationUs [GOOD] >> FormatTimes::ParseDuration [GOOD] >> ParseStats::ParseWithSources [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> StatsFormat::FullStat [GOOD] |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut >> StatsFormat::AggregateStat [GOOD] >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> ActionParsingTest::ActionsForQueueTest [GOOD] >> ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest [GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> FormatTimes::DurationMs [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest [GOOD] >> UserCountersTest::DisableCountersTest >> Config::IncludeScope [GOOD] >> Config::ExcludeScope [GOOD] >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest [GOOD] |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] 
{BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> ParseStats::ParseMultipleGraphsV2 [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> TDqHashCombineTest::TestBlockModeNoInput [GOOD] >> TDqHashCombineTest::TestBlockModeSingleRow >> TDqHashCombineTest::TestBlockModeSingleRow [GOOD] >> TDqHashCombineTest::TestBlockModeMultiBlocks |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |80.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] |80.8%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest |80.8%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/base/ut/unittest >> TDqHashCombineTest::TestBlockModeMultiBlocks [GOOD] >> TDqHashCombineTest::TestWideModeNoInput [GOOD] >> TDqHashCombineTest::TestWideModeSingleRow |80.8%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp >> TDqHashCombineTest::TestWideModeSingleRow [GOOD] >> TDqHashCombineTest::TestWideModeMultiRows |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |80.8%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |80.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest >> TDqHashCombineTest::TestWideModeMultiRows [GOOD] >> TDqHashCombineTest::TestWideModeAggregationNoInput |80.8%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest |80.8%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/test_connection/ut/unittest >> TDqHashCombineTest::TestWideModeAggregationNoInput [GOOD] >> TDqHashCombineTest::TestWideModeAggregationSingleRow >> TDqHashCombineTest::TestWideModeAggregationSingleRow [GOOD] >> TDqHashCombineTest::TestWideModeAggregationWithSpilling |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp >> TDqHashCombineTest::TestWideModeAggregationWithSpilling [GOOD] >> TDqHashCombineTest::TestWideModeAggregationMultiRowNoSpilling |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp >> TDqHashCombineTest::TestWideModeAggregationMultiRowNoSpilling [GOOD] >> TDqHashJoinBasicTest::TestBasicPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestBasicPassthrough+BlockJoin >> TDqHashJoinBasicTest::TestBasicPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestCrossPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestCrossPassthrough+BlockJoin >> TDqHashJoinBasicTest::TestCrossPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestMixedKeysPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestMixedKeysPassthrough+BlockJoin >> TDqHashJoinBasicTest::TestMixedKeysPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyFlows-BlockJoin >> TDqHashJoinBasicTest::TestEmptyFlows-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyFlows+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyLeft-BlockJoin >> TDqHashJoinBasicTest::TestEmptyLeft-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyLeft+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyRight-BlockJoin >> TDqHashJoinBasicTest::TestEmptyRight-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyRight+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestInnerRenamesKind-BlockJoin >> TDqHashJoinBasicTest::TestInnerRenamesKind-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestInnerRenamesKind+BlockJoin >> 
TDqHashJoinBasicTest::TestInnerRenamesKind+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestBlockSpilling >> TDqHashJoinBasicTest::TestBlockSpilling [GOOD] |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/comp_nodes/ut/unittest >> TDqHashJoinBasicTest::TestBlockSpilling [GOOD] |80.9%| [TM] {RESULT} ydb/library/yql/dq/comp_nodes/ut/unittest |80.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/comp_nodes/ut/unittest |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.9%| [AR] {RESULT} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut >> BlobDepotWithTestShard::PlainGroup [GOOD] >> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD] >> TParseParamsTests::CreateUser [GOOD] >> XmlBuilderTest::WritesProperly [GOOD] >> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD] >> TParseParamsTests::DeleteMessageBatchRequest [GOOD] >> TParseParamsTests::MessageBody [GOOD] >> TParseParamsTests::SendMessageBatchRequest [GOOD] >> TParseParamsTests::DeleteQueueBatchRequest [GOOD] >> TParseParamsTests::PurgeQueueBatchRequest [GOOD] >> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD] >> TParseParamsTests::UnnumberedAttribute [GOOD] >> TParseParamsTests::UnnumberedAttributeName [GOOD] >> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD] >> TParseParamsTests::FailsOnInvalidGroupId [GOOD] >> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD] >> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD] >> TParseParamsTests::FailsOnInvalidWaitTime [GOOD] >> XmlBuilderTest::MacroBuilder [GOOD] >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD] >> GroupedMemoryLimiter::Simplest [GOOD] >> GroupedMemoryLimiter::Simple [GOOD] >> GroupedMemoryLimiter::CommonUsage [GOOD] >> GroupedMemoryLimiter::Update [GOOD] |80.9%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/ut_testshard/unittest |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |80.9%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/http/ut/unittest |80.9%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/limiter/grouped_memory/ut/unittest >> GroupedMemoryLimiter::Update [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=3;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=1;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=3;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=3;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=3;stage=GLOBAL; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=2;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=10;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=7;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=7;stage=GLOBAL; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=4;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=5;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=6;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2010;delta=1000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=9;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=4000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=10;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=11;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=8;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:100;name=GLOBAL;event=update;usage=1000;waiting=10;allocated=1;from=1000;to=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=20;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=13;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=12;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; |80.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/limiter/grouped_memory/ut/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.9%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest |80.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.9%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |80.9%| [TS] {RESULT} 
ydb/core/tx/limiter/grouped_memory/ut/unittest |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |80.9%| [TS] {RESULT} ydb/core/ymq/ut/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |81.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |80.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |81.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge >> BsControllerTest::DecommitRejected >> BsControllerTest::SelfHealMirror3dc |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal >> SelfHealActorTest::SingleErrorDisk [GOOD] >> SelfHealActorTest::NoMoreThanOneReplicating |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> BsControllerTest::TestLocalBrokenRelocation >> BsControllerTest::SelfHealBlock4Plus2 >> VDiskBalancing::TestStopOneNode_Block42 >> BlobDepot::BasicPutAndGet |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> VDiskBalancing::TestRandom_Mirror3dc >> VDiskBalancing::TestRandom_Block42 >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> BsControllerTest::DecommitRejected [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2026-01-07T17:23:27.467625Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-07T17:23:27.467684Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-07T17:23:27.467754Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-07T17:23:27.467776Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.467824Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-07T17:23:27.467850Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.467883Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-07T17:23:27.467912Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.467944Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-07T17:23:27.467973Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.468017Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-07T17:23:27.468036Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.468069Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-07T17:23:27.468087Z 7 
00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.468116Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-07T17:23:27.468134Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.468189Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-07T17:23:27.468218Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.468250Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-07T17:23:27.468269Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.468297Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-07T17:23:27.468316Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.468349Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-07T17:23:27.468368Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.468395Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-07T17:23:27.468414Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.468442Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-07T17:23:27.468462Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.468499Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-07T17:23:27.468519Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.488640Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2026-01-07T17:23:27.489414Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2026-01-07T17:23:27.489472Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2026-01-07T17:23:27.489554Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2026-01-07T17:23:27.489593Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 2026-01-07T17:23:27.489632Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2026-01-07T17:23:27.489691Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2026-01-07T17:23:27.489733Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2026-01-07T17:23:27.489771Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2026-01-07T17:23:27.489808Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2026-01-07T17:23:27.489856Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2026-01-07T17:23:27.490005Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 2026-01-07T17:23:27.490055Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR 
ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2026-01-07T17:23:27.490094Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# [14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2026-01-07T17:23:27.490130Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2026-01-07T17:23:27.539228Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2026-01-07T17:23:27.539304Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.539341Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.539414Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.539449Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.539502Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.539547Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.539596Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.539655Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.539692Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.539746Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.539789Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.539822Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.539861Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.539901Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.543220Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2026-01-07T17:23:27.543286Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2026-01-07T17:23:27.549143Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2026-01-07T17:23:27.549204Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2026-01-07T17:23:27.549252Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:591:21] Status# OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2026-01-07T17:23:27.549297Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2026-01-07T17:23:27.549347Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2026-01-07T17:23:27.549380Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2026-01-07T17:23:27.549421Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2026-01-07T17:23:27.549447Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2026-01-07T17:23:27.549482Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2026-01-07T17:23:27.549507Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2026-01-07T17:23:27.549544Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2026-01-07T17:23:27.549578Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2026-01-07T17:23:27.549630Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected 
Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2026-01-07T17:23:27.549654Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2026-01-07T17:23:27.549689Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2026-01-07T17:23:27.549711Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2026-01-07T17:23:27.549745Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2026-01-07T17:23:27.549768Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2026-01-07T17:23:27.549803Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2026-01-07T17:23:27.549844Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2026-01-07T17:23:27.549889Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] PipeClient# [12:600:21] 2026-01-07T17:23:27.549915Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2026-01-07T17:23:27.549957Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2026-01-07T17:23:27.549983Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2026-01-07T17:23:27.550034Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2026-01-07T17:23:27.550059Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2026-01-07T17:23:27.550094Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2026-01-07T17:23:27.550116Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2026-01-07T17:23:27.552765Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:27.552846Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-07T17:23:27.577305Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2026-01-07T17:23:27.578428Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-01-07T17:23:27.578492Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-07T17:23:27.578569Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2026-01-07T17:23:27.578748Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-07T17:23:27.578830Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-07T17:23:27.578882Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2026-01-07T17:23:27.578985Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-01-07T17:23:27.579030Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-07T17:23:27.579084Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2026-01-07T17:23:27.579185Z 5 
00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-01-07T17:23:27.579220Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-07T17:23:27.579265Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2026-01-07T1 ... 000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.806843Z 1 00h01m25.355024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.807056Z 13 00h01m26.349512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2026-01-07T17:23:27.807429Z 1 00h01m26.349512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.807676Z 14 00h01m28.082512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2026-01-07T17:23:27.807980Z 1 00h01m28.082512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.808255Z 1 00h01m28.864512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.808659Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.808991Z 1 00h01m30.620512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.809149Z 10 00h01m30.687512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2026-01-07T17:23:27.809451Z 1 00h01m30.687512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] 
Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.809612Z 11 00h01m32.053512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2026-01-07T17:23:27.809871Z 1 00h01m32.053512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.810025Z 2 00h01m33.502512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2026-01-07T17:23:27.810414Z 1 00h01m33.502512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.810637Z 3 00h01m34.316512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2026-01-07T17:23:27.810994Z 1 00h01m34.316512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.811229Z 1 00h01m35.813512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2026-01-07T17:23:27.811504Z 1 00h01m35.813512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-07T17:23:27.811708Z 14 00h01m37.067536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2026-01-07T17:23:27.812035Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483648 2026-01-07T17:23:27.812712Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.812762Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2026-01-07T17:23:27.813014Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813049Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2026-01-07T17:23:27.813084Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: 
{BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813109Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2026-01-07T17:23:27.813150Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813186Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2026-01-07T17:23:27.813229Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813258Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2026-01-07T17:23:27.813287Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813311Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2026-01-07T17:23:27.813338Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813362Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2026-01-07T17:23:27.813388Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:27.813425Z 1 00h01m37.067536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2026-01-07T17:23:27.815751Z 1 00h01m37.068048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:27.815819Z 1 00h01m37.068048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2026-01-07T17:23:27.816378Z 1 00h01m37.068048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2026-01-07T17:23:27.816413Z 1 00h01m37.068048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483648 Success# true 2026-01-07T17:23:27.816521Z 8 00h01m37.068048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-07T17:23:27.816561Z 8 00h01m37.068048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2026-01-07T17:23:27.816658Z 2 00h01m37.068048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-01-07T17:23:27.816708Z 2 00h01m37.068048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2026-01-07T17:23:27.816791Z 3 00h01m37.068048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-07T17:23:27.816831Z 3 00h01m37.068048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2026-01-07T17:23:27.816916Z 4 00h01m37.068048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-01-07T17:23:27.816999Z 4 00h01m37.068048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 
2026-01-07T17:23:27.817068Z 5 00h01m37.068048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-01-07T17:23:27.817117Z 5 00h01m37.068048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2026-01-07T17:23:27.817197Z 6 00h01m37.068048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-07T17:23:27.817238Z 6 00h01m37.068048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2026-01-07T17:23:27.817296Z 9 00h01m37.068048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-07T17:23:27.817352Z 13 00h01m37.068048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-07T17:23:27.817395Z 13 00h01m37.068048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2026-01-07T17:23:27.817469Z 14 00h01m37.068048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-01-07T17:23:27.817511Z 14 00h01m37.068048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2026-01-07T17:23:27.817600Z 15 00h01m37.068048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-07T17:23:27.817638Z 15 00h01m37.068048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2026-01-07T17:23:27.817706Z 15 00h01m37.068048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2026-01-07T17:23:27.819101Z 15 00h01m41.246048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:27.821611Z 15 00h01m58.881048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2026-01-07T17:23:27.822459Z 9 00h01m58.881560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-07T17:23:27.822514Z 9 00h01m58.881560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> BridgeGet::PartRestorationAcrossBridge |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy |81.1%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> BlobDepot::BasicRange |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |81.1%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |81.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 11229557958632594636 SEND TEvPut with key [1:1:1:0:0:100:0] 2026-01-07T17:23:28.790751Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T17:23:28.791058Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-01-07T17:23:28.837638Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover >> BsControllerTest::TestLocalSelfHeal [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> BridgeGet::PartRestorationAcrossBridge [GOOD] >> BlobDepot::BasicDiscover [GOOD] >> BlobDepot::BasicBlock |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |81.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |81.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2026-01-07T17:23:27.765335Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-07T17:23:27.765473Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 
2026-01-07T17:23:27.765830Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-07T17:23:27.765856Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.765937Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-07T17:23:27.765957Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.766042Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-07T17:23:27.766063Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.766161Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-07T17:23:27.766180Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.766206Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-07T17:23:27.766226Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.766256Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-07T17:23:27.766342Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.766432Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-07T17:23:27.766574Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.766682Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-07T17:23:27.766704Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.766736Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-07T17:23:27.766756Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.767359Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-07T17:23:27.767381Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.767412Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-07T17:23:27.767430Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.767540Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-07T17:23:27.767558Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.767696Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-07T17:23:27.767716Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.767837Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-07T17:23:27.767953Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.767988Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-07T17:23:27.768006Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-07T17:23:27.768114Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-07T17:23:27.768136Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-07T17:23:27.768217Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-07T17:23:27.768238Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-07T17:23:27.768280Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-07T17:23:27.768302Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-07T17:23:27.768339Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-07T17:23:27.768360Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-07T17:23:27.768400Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-07T17:23:27.768421Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-07T17:23:27.768453Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-07T17:23:27.768476Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-07T17:23:27.768512Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-07T17:23:27.768574Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-07T17:23:27.768618Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-07T17:23:27.768639Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] 
Connect 2026-01-07T17:23:27.768672Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-07T17:23:27.768697Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-07T17:23:27.768732Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-07T17:23:27.768755Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-07T17:23:27.768804Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-07T17:23:27.768829Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-07T17:23:27.768871Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-07T17:23:27.768892Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-07T17:23:27.768939Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-07T17:23:27.768963Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-07T17:23:27.769012Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-07T17:23:27.769035Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-07T17:23:27.769073Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-07T17:23:27.769105Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-07T17:23:27.769142Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-07T17:23:27.769162Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-07T17:23:27.769200Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-07T17:23:27.769221Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-07T17:23:27.769269Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-07T17:23:27.769291Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-07T17:23:27.769326Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-07T17:23:27.769345Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-07T17:23:27.769378Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-07T17:23:27.769402Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-07T17:23:27.815053Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-07T17:23:27.817272Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-07T17:23:27.817341Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-01-07T17:23:27.817381Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-07T17:23:27.817516Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-07T17:23:27.817611Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-07T17:23:27.817681Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-01-07T17:23:27.817719Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-07T17:23:27.817806Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-01-07T17:23:27.817851Z 10 
00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-07T17:23:27.817893Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-07T17:23:27.817929Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-07T17:23:27.818014Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-07T17:23:27.818077Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-07T17:23:27.818133Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-07T17:23:27.818173Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-07T17:23:27.818220Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-07T17:23:27.818340Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-07T17:23:27.818384Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-07T17:23:27.818419Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-07T17:23:27.818454Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-07T17:23:27.818600Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-01-07T17:23:27.818645Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-07T17:23:27.818681Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-07T17:23:27.818716Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-07T17:23:27.818750Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-07T17:23:27.818843Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-07T17:23:27.818880Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-01-07T17:23:27.819090Z 29 00h00m00.000000s 
:BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-07T17:23:27.819233Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-07T17:23:27.819319Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-07T17:23:27.819357Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-07T17:23:27.819392Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-07T17:23:27.819425Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-07T17:23:27.819521Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... icated# true 2026-01-07T17:23:30.667526Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483739 VDiskId# [8000005b:1:2:0:0] DiskIsOk# true 2026-01-07T17:23:30.667556Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483739 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.667581Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483739 VDiskId# [8000005b:1:2:2:0] DiskIsOk# true 2026-01-07T17:23:30.689663Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483739 Items# [8000005b:1:2:1:0]: 33:1000:1005 -> 33:1001:1010 ConfigTxSeqNo# 48 2026-01-07T17:23:30.689704Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483739 Success# true 2026-01-07T17:23:30.689830Z 18 00h05m00.105120s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-07T17:23:30.689885Z 18 00h05m00.105120s :BS_NODE DEBUG: [18] VDiskId# [8000005b:1:1:0:0] -> [8000005b:2:1:0:0] 2026-01-07T17:23:30.690506Z 36 00h05m00.105120s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-01-07T17:23:30.690727Z 36 00h05m00.105120s :BS_NODE DEBUG: [36] VDiskId# [8000005b:1:2:2:0] -> [8000005b:2:2:2:0] 2026-01-07T17:23:30.690798Z 21 00h05m00.105120s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-07T17:23:30.690834Z 21 00h05m00.105120s :BS_NODE DEBUG: [21] VDiskId# [8000005b:1:1:1:0] -> [8000005b:2:1:1:0] 2026-01-07T17:23:30.691084Z 6 00h05m00.105120s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-07T17:23:30.691124Z 6 00h05m00.105120s :BS_NODE DEBUG: [6] VDiskId# [8000005b:1:0:0:0] -> [8000005b:2:0:0:0] 2026-01-07T17:23:30.691187Z 24 00h05m00.105120s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-01-07T17:23:30.691221Z 24 00h05m00.105120s :BS_NODE DEBUG: [24] VDiskId# [8000005b:1:1:2:0] -> [8000005b:2:1:2:0] 2026-01-07T17:23:30.691836Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-07T17:23:30.691872Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] VDiskId# [8000005b:1:0:1:0] -> [8000005b:2:0:1:0] 2026-01-07T17:23:30.692466Z 12 00h05m00.105120s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-01-07T17:23:30.692505Z 12 00h05m00.105120s :BS_NODE DEBUG: [12] 
VDiskId# [8000005b:1:0:2:0] -> [8000005b:2:0:2:0] 2026-01-07T17:23:30.693079Z 30 00h05m00.105120s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2026-01-07T17:23:30.693120Z 30 00h05m00.105120s :BS_NODE DEBUG: [30] VDiskId# [8000005b:1:2:0:0] -> [8000005b:2:2:0:0] 2026-01-07T17:23:30.693522Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.693560Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] VDiskId# [8000005b:2:2:1:0] PDiskId# 1001 VSlotId# 1010 created 2026-01-07T17:23:30.693617Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] VDiskId# [8000005b:2:2:1:0] status changed to INIT_PENDING 2026-01-07T17:23:30.694721Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483723 2026-01-07T17:23:30.702542Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.702766Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:0:0:0] DiskIsOk# true 2026-01-07T17:23:30.702801Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.702824Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:0:1:0] DiskIsOk# true 2026-01-07T17:23:30.702848Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.703035Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:0:2:0] DiskIsOk# true 2026-01-07T17:23:30.703226Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.703412Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:1:0:0] DiskIsOk# true 2026-01-07T17:23:30.703608Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.703810Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:1:1:0] DiskIsOk# true 2026-01-07T17:23:30.703836Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.704026Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:1:2:0] DiskIsOk# true 2026-01-07T17:23:30.704049Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:30.704071Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:2:0:0] DiskIsOk# true 2026-01-07T17:23:30.704092Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 
2026-01-07T17:23:30.704116Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:1:2:2:0] DiskIsOk# true 2026-01-07T17:23:30.722425Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483723 Items# [8000004b:1:2:1:0]: 33:1000:1004 -> 33:1002:1010 ConfigTxSeqNo# 49 2026-01-07T17:23:30.722768Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483723 Success# true 2026-01-07T17:23:30.723392Z 18 00h05m00.105632s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-07T17:23:30.723473Z 18 00h05m00.105632s :BS_NODE DEBUG: [18] VDiskId# [8000004b:1:1:0:0] -> [8000004b:2:1:0:0] 2026-01-07T17:23:30.723773Z 36 00h05m00.105632s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-01-07T17:23:30.723816Z 36 00h05m00.105632s :BS_NODE DEBUG: [36] VDiskId# [8000004b:1:2:2:0] -> [8000004b:2:2:2:0] 2026-01-07T17:23:30.724064Z 21 00h05m00.105632s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-07T17:23:30.724439Z 21 00h05m00.105632s :BS_NODE DEBUG: [21] VDiskId# [8000004b:1:1:1:0] -> [8000004b:2:1:1:0] 2026-01-07T17:23:30.724516Z 6 00h05m00.105632s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-07T17:23:30.724566Z 6 00h05m00.105632s :BS_NODE DEBUG: [6] VDiskId# [8000004b:1:0:0:0] -> [8000004b:2:0:0:0] 2026-01-07T17:23:30.724649Z 24 00h05m00.105632s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-01-07T17:23:30.725197Z 24 00h05m00.105632s :BS_NODE DEBUG: [24] VDiskId# [8000004b:1:1:2:0] -> [8000004b:2:1:2:0] 2026-01-07T17:23:30.725265Z 9 00h05m00.105632s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-07T17:23:30.725300Z 9 00h05m00.105632s :BS_NODE DEBUG: [9] VDiskId# [8000004b:1:0:1:0] -> [8000004b:2:0:1:0] 2026-01-07T17:23:30.725706Z 12 00h05m00.105632s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-01-07T17:23:30.725910Z 12 00h05m00.105632s :BS_NODE DEBUG: [12] VDiskId# [8000004b:1:0:2:0] -> [8000004b:2:0:2:0] 2026-01-07T17:23:30.725984Z 30 00h05m00.105632s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2026-01-07T17:23:30.726023Z 30 00h05m00.105632s :BS_NODE DEBUG: [30] VDiskId# [8000004b:1:2:0:0] -> [8000004b:2:2:0:0] 2026-01-07T17:23:30.726588Z 33 00h05m00.105632s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.726633Z 33 00h05m00.105632s :BS_NODE DEBUG: [33] VDiskId# [8000004b:2:2:1:0] PDiskId# 1002 VSlotId# 1010 created 2026-01-07T17:23:30.726847Z 33 00h05m00.105632s :BS_NODE DEBUG: [33] VDiskId# [8000004b:2:2:1:0] status changed to INIT_PENDING 2026-01-07T17:23:30.732397Z 33 00h05m01.259048s :BS_NODE DEBUG: [33] VDiskId# [8000003b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.801000Z 33 00h05m01.412560s :BS_NODE DEBUG: [33] VDiskId# [8000002b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.805002Z 33 00h05m01.971120s :BS_NODE DEBUG: [33] VDiskId# [8000005b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.809607Z 33 00h05m03.486584s :BS_NODE DEBUG: [33] VDiskId# [8000000b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.814034Z 33 00h05m03.861632s :BS_NODE DEBUG: [33] VDiskId# [8000004b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.820604Z 33 00h05m04.281072s :BS_NODE DEBUG: [33] VDiskId# [8000001b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.831635Z 33 00h05m05.181096s :BS_NODE DEBUG: [33] VDiskId# [8000007b:2:2:1:0] status changed to REPLICATING 2026-01-07T17:23:30.837531Z 33 00h05m05.318608s :BS_NODE DEBUG: [33] VDiskId# [8000006b:2:2:1:0] status 
changed to REPLICATING 2026-01-07T17:23:30.843967Z 33 00h05m09.317120s :BS_NODE DEBUG: [33] VDiskId# [8000005b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.852782Z 33 00h05m09.317632s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.853058Z 33 00h05m09.317632s :BS_NODE DEBUG: [33] VDiskId# [8000005b:1:2:1:0] destroyed 2026-01-07T17:23:30.855097Z 33 00h05m11.976584s :BS_NODE DEBUG: [33] VDiskId# [8000000b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.861003Z 33 00h05m11.977096s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.861051Z 33 00h05m11.977096s :BS_NODE DEBUG: [33] VDiskId# [8000000b:1:2:1:0] destroyed 2026-01-07T17:23:30.867415Z 33 00h05m17.374560s :BS_NODE DEBUG: [33] VDiskId# [8000002b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.875575Z 33 00h05m17.375072s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.875620Z 33 00h05m17.375072s :BS_NODE DEBUG: [33] VDiskId# [8000002b:1:2:1:0] destroyed 2026-01-07T17:23:30.876717Z 33 00h05m20.220048s :BS_NODE DEBUG: [33] VDiskId# [8000003b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.884941Z 33 00h05m20.220560s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.884999Z 33 00h05m20.220560s :BS_NODE DEBUG: [33] VDiskId# [8000003b:1:2:1:0] destroyed 2026-01-07T17:23:30.887735Z 33 00h05m25.715096s :BS_NODE DEBUG: [33] VDiskId# [8000007b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.895905Z 33 00h05m25.715608s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.895958Z 33 00h05m25.715608s :BS_NODE DEBUG: [33] VDiskId# [8000007b:1:2:1:0] destroyed 2026-01-07T17:23:30.896835Z 33 00h05m26.816072s :BS_NODE DEBUG: [33] VDiskId# [8000001b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.904665Z 33 00h05m26.816584s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.904713Z 33 00h05m26.816584s :BS_NODE DEBUG: [33] VDiskId# [8000001b:1:2:1:0] destroyed 2026-01-07T17:23:30.909899Z 33 00h05m30.597632s :BS_NODE DEBUG: [33] VDiskId# [8000004b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.919054Z 33 00h05m30.598144s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.919101Z 33 00h05m30.598144s :BS_NODE DEBUG: [33] VDiskId# [8000004b:1:2:1:0] destroyed 2026-01-07T17:23:30.923076Z 33 00h05m35.758608s :BS_NODE DEBUG: [33] VDiskId# [8000006b:2:2:1:0] status changed to READY 2026-01-07T17:23:30.930502Z 33 00h05m35.759120s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-07T17:23:30.930552Z 33 00h05m35.759120s :BS_NODE DEBUG: [33] VDiskId# [8000006b:1:2:1:0] destroyed |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 7394292065082794419 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-07T17:23:29.273866Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 
[GOOD] Test command err: RandomSeed# 11172256636344186239 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-07T17:23:29.459222Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-07T17:23:29.459369Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2026-01-07T17:23:29.459458Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-07T17:23:29.459552Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2026-01-07T17:23:29.459620Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-07T17:23:29.459691Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-07T17:23:29.459811Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 7233121557619503528 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2026-01-07T17:23:29.100864Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T17:23:29.101369Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-01-07T17:23:29.175924Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 13022074673774771527 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 
2026-01-07T17:23:29.352660Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-07T17:23:29.352825Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2026-01-07T17:23:29.352902Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-07T17:23:29.353015Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2026-01-07T17:23:29.353087Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-07T17:23:29.353150Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-07T17:23:29.353236Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 17711528558300873437 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-07T17:23:29.344913Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6350:836] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] Test command err: RandomSeed# 10224699743591521021 originalGroupIndex# 0 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 0 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** 
reading from i# 2 originalGroupIndex# 1 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 9008073393497173138 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2026-01-07T17:23:30.407133Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> test.py::test[solomon-BadDownsamplingAggregation-] >> BsControllerTest::TestLocalBrokenRelocation [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne >> BSCRestartPDisk::RestartOneByOneWithReconnects >> BSCRestartPDisk::RestartNotAllowed |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2026-01-07T17:23:27.949508Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-07T17:23:27.949556Z 1 
00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-07T17:23:27.949633Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-07T17:23:27.949654Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.949696Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-07T17:23:27.949716Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.949760Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-07T17:23:27.949779Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.949810Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-07T17:23:27.949829Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.949858Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-07T17:23:27.949877Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.949909Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-07T17:23:27.949937Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.949980Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-07T17:23:27.950003Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.950045Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-07T17:23:27.950066Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.950101Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-07T17:23:27.950121Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.950152Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-07T17:23:27.950170Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.950198Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-07T17:23:27.950225Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.950302Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-07T17:23:27.950324Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.950360Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-07T17:23:27.950381Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.950420Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-07T17:23:27.950447Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.950476Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-07T17:23:27.950490Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-07T17:23:27.950514Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-07T17:23:27.950530Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-07T17:23:27.950548Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-07T17:23:27.950561Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-07T17:23:27.950583Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-07T17:23:27.950595Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-07T17:23:27.950619Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-07T17:23:27.950632Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-07T17:23:27.950657Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-07T17:23:27.950671Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-07T17:23:27.950695Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-07T17:23:27.950712Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-07T17:23:27.950732Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-07T17:23:27.950761Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-07T17:23:27.950785Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-07T17:23:27.950799Z 24 
00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-07T17:23:27.950821Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-07T17:23:27.950836Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-07T17:23:27.950857Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-07T17:23:27.950871Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-07T17:23:27.950891Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-07T17:23:27.950904Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-07T17:23:27.950927Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-07T17:23:27.950940Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-07T17:23:27.950970Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-07T17:23:27.950985Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-07T17:23:27.951014Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-07T17:23:27.951028Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-07T17:23:27.951054Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-07T17:23:27.951074Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-07T17:23:27.951101Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-07T17:23:27.951113Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-07T17:23:27.951132Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-07T17:23:27.951146Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-07T17:23:27.951180Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-07T17:23:27.951193Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-07T17:23:27.951213Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-07T17:23:27.951225Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-07T17:23:27.951245Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-07T17:23:27.951270Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-07T17:23:27.965728Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-07T17:23:27.966656Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-07T17:23:27.966698Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-01-07T17:23:27.966734Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-07T17:23:27.966759Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-07T17:23:27.966784Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-07T17:23:27.966807Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-01-07T17:23:27.966830Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-07T17:23:27.966857Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 
2026-01-07T17:23:27.966881Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-07T17:23:27.966905Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-07T17:23:27.966927Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-07T17:23:27.966955Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-07T17:23:27.967009Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-07T17:23:27.967042Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-07T17:23:27.967070Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-07T17:23:27.967106Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-07T17:23:27.967136Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-07T17:23:27.967165Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-07T17:23:27.967189Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-07T17:23:27.967218Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-07T17:23:27.967254Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-01-07T17:23:27.967282Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-07T17:23:27.967307Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-07T17:23:27.967331Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-07T17:23:27.967354Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-07T17:23:27.967387Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-07T17:23:27.967417Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 
2026-01-07T17:23:27.967464Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-07T17:23:27.967502Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-07T17:23:27.967536Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-07T17:23:27.967563Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-07T17:23:27.967591Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-07T17:23:27.967614Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-07T17:23:27.967641Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2026-01-07T17:23:34.428640Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2026-01-07T17:23:34.428699Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2026-01-07T17:23:34.428745Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2026-01-07T17:23:34.428790Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2026-01-07T17:23:34.429421Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-07T17:23:34.429476Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2026-01-07T17:23:34.429527Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2026-01-07T17:23:34.429580Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2026-01-07T17:23:34.429621Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2026-01-07T17:23:34.429680Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2026-01-07T17:23:34.429731Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2026-01-07T17:23:34.429772Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2026-01-07T17:23:34.429810Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2026-01-07T17:23:34.429855Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2026-01-07T17:23:34.429894Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2026-01-07T17:23:34.429936Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2026-01-07T17:23:34.429977Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2026-01-07T17:23:34.430021Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2026-01-07T17:23:34.430062Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# 
[80000052:1:1:2:0] -> [80000052:2:1:2:0] 2026-01-07T17:23:34.430112Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2026-01-07T17:23:34.430159Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2026-01-07T17:23:34.430757Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2026-01-07T17:23:34.430813Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2026-01-07T17:23:34.430863Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2026-01-07T17:23:34.430905Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2026-01-07T17:23:34.430945Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2026-01-07T17:23:34.431002Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2026-01-07T17:23:34.431053Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2026-01-07T17:23:34.431101Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2026-01-07T17:23:34.431150Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2026-01-07T17:23:34.431189Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2026-01-07T17:23:34.431230Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2026-01-07T17:23:34.431270Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2026-01-07T17:23:34.431725Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2026-01-07T17:23:34.431779Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2026-01-07T17:23:34.431831Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2026-01-07T17:23:34.431872Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2026-01-07T17:23:34.431908Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2026-01-07T17:23:34.431945Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2026-01-07T17:23:34.432010Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2026-01-07T17:23:34.432060Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2026-01-07T17:23:34.432098Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2026-01-07T17:23:34.434743Z 4 01h25m01.655560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.435210Z 2 01h25m01.916560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.435651Z 10 01h25m02.280560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2026-01-07T17:23:34.436042Z 4 01h25m02.541560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.436389Z 7 01h25m02.783560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2026-01-07T17:23:34.436852Z 5 01h25m03.279560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.437273Z 7 01h25m03.348560s :BS_NODE DEBUG: [7] VDiskId# 
[80000051:3:0:1:0] status changed to REPLICATING 2026-01-07T17:23:34.437645Z 10 01h25m03.990560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2026-01-07T17:23:34.438023Z 2 01h25m04.090560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.438340Z 5 01h25m04.219560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.438722Z 4 01h25m04.394560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.439135Z 10 01h25m04.510560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2026-01-07T17:23:34.439544Z 7 01h25m04.612560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2026-01-07T17:23:34.441373Z 8 01h25m05.345560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2026-01-07T17:23:34.441815Z 4 01h25m05.504560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2026-01-07T17:23:34.442288Z 7 01h25m06.036560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2026-01-07T17:23:34.442975Z 5 01h25m10.946560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2026-01-07T17:23:34.443864Z 1 01h25m10.947072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.443931Z 1 01h25m10.947072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2026-01-07T17:23:34.444074Z 4 01h25m12.548560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2026-01-07T17:23:34.444807Z 1 01h25m12.549072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.444858Z 1 01h25m12.549072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2026-01-07T17:23:34.444985Z 4 01h25m12.741560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2026-01-07T17:23:34.445698Z 1 01h25m12.742072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.445745Z 1 01h25m12.742072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2026-01-07T17:23:34.446676Z 10 01h25m15.706560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2026-01-07T17:23:34.447444Z 1 01h25m15.707072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.447491Z 1 01h25m15.707072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2026-01-07T17:23:34.447942Z 4 01h25m25.801560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2026-01-07T17:23:34.448762Z 1 01h25m25.802072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.448805Z 1 01h25m25.802072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2026-01-07T17:23:34.449172Z 5 01h25m26.280560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2026-01-07T17:23:34.449872Z 1 01h25m26.281072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.449923Z 1 01h25m26.281072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2026-01-07T17:23:34.450023Z 7 01h25m26.538560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2026-01-07T17:23:34.450757Z 1 01h25m26.539072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.450803Z 1 01h25m26.539072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2026-01-07T17:23:34.451391Z 4 01h25m28.154560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2026-01-07T17:23:34.452120Z 1 01h25m28.155072s :BS_NODE DEBUG: [1] 
NodeServiceSetUpdate 2026-01-07T17:23:34.452170Z 1 01h25m28.155072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2026-01-07T17:23:34.452271Z 10 01h25m28.799560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2026-01-07T17:23:34.453037Z 1 01h25m28.800072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.453083Z 1 01h25m28.800072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2026-01-07T17:23:34.453199Z 2 01h25m29.022560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2026-01-07T17:23:34.453873Z 1 01h25m29.023072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.453969Z 1 01h25m29.023072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2026-01-07T17:23:34.454065Z 7 01h25m29.171560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2026-01-07T17:23:34.454781Z 1 01h25m29.172072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.454828Z 1 01h25m29.172072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2026-01-07T17:23:34.454914Z 8 01h25m29.911560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2026-01-07T17:23:34.455616Z 1 01h25m29.912072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.455659Z 1 01h25m29.912072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2026-01-07T17:23:34.456747Z 2 01h25m30.009560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2026-01-07T17:23:34.457502Z 1 01h25m30.010072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.457548Z 1 01h25m30.010072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2026-01-07T17:23:34.457908Z 7 01h25m32.717560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2026-01-07T17:23:34.458621Z 1 01h25m32.718072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.458672Z 1 01h25m32.718072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2026-01-07T17:23:34.458758Z 10 01h25m32.989560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2026-01-07T17:23:34.459475Z 1 01h25m32.990072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.459544Z 1 01h25m32.990072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2026-01-07T17:23:34.460023Z 7 01h25m35.787560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2026-01-07T17:23:34.460734Z 1 01h25m35.788072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-07T17:23:34.460787Z 1 01h25m35.788072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] Test command err: RandomSeed# 11105039456143841012 2026-01-07T17:23:33.167534Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:33.169452Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 831910751831021017] 2026-01-07T17:23:33.186891Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:33.292659Z 4 
00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:33.294523Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4904104560021967420] 2026-01-07T17:23:33.313452Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:33.390337Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:33.392185Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16432367975340809775] 2026-01-07T17:23:33.404014Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:33.847200Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:33.849257Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4815901133564010452] 2026-01-07T17:23:33.859988Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:33.957774Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:33.959793Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7599957322786243861] 2026-01-07T17:23:33.983704Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:34.072272Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:34.074336Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4667858805880317432] 2026-01-07T17:23:34.086898Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:34.540307Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:34.542336Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2503394883423048066] 2026-01-07T17:23:34.552562Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:34.671858Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) 
TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:34.673714Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14173289802776223812] 2026-01-07T17:23:34.699937Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2026-01-07T17:23:27.725562Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-07T17:23:27.725606Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-07T17:23:27.725669Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-07T17:23:27.725699Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.725735Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-07T17:23:27.725752Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.725785Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-07T17:23:27.725812Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.725849Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-07T17:23:27.725869Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.725906Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-07T17:23:27.725925Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.725966Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-07T17:23:27.725987Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.726022Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-07T17:23:27.726041Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.726075Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-07T17:23:27.726095Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.726126Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-07T17:23:27.726157Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.726204Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-07T17:23:27.726225Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.726257Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-07T17:23:27.726277Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.726329Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-07T17:23:27.726351Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.726386Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-07T17:23:27.726407Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.726438Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-07T17:23:27.726471Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.726516Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-07T17:23:27.726537Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-07T17:23:27.726569Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-07T17:23:27.726585Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-07T17:23:27.726614Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-07T17:23:27.726641Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-07T17:23:27.726687Z 19 00h00m00.000000s :BS_NODE DEBUG: 
[19] Bootstrap 2026-01-07T17:23:27.726706Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-07T17:23:27.726731Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-07T17:23:27.726752Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-07T17:23:27.726782Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-07T17:23:27.726802Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-07T17:23:27.726835Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-07T17:23:27.726854Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-07T17:23:27.726883Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-07T17:23:27.726901Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-07T17:23:27.726932Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-07T17:23:27.726952Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-07T17:23:27.726989Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-07T17:23:27.727009Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-07T17:23:27.727057Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-07T17:23:27.727087Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-07T17:23:27.727118Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-07T17:23:27.727138Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-07T17:23:27.727169Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-07T17:23:27.727188Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-07T17:23:27.727227Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-07T17:23:27.727247Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-07T17:23:27.727281Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-07T17:23:27.727300Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-07T17:23:27.727330Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-07T17:23:27.727348Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-07T17:23:27.727385Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-07T17:23:27.727405Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-07T17:23:27.744650Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2163:55] Status# ERROR ClientId# [1:2163:55] ServerId# [0:0:0] PipeClient# [1:2163:55] 2026-01-07T17:23:27.745988Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2164:37] Status# ERROR ClientId# [2:2164:37] ServerId# [0:0:0] PipeClient# [2:2164:37] 2026-01-07T17:23:27.746046Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2165:37] Status# ERROR ClientId# [3:2165:37] ServerId# [0:0:0] PipeClient# [3:2165:37] 2026-01-07T17:23:27.746096Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2166:37] Status# ERROR ClientId# [4:2166:37] ServerId# [0:0:0] PipeClient# [4:2166:37] 2026-01-07T17:23:27.746142Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2167:37] Status# ERROR ClientId# [5:2167:37] ServerId# [0:0:0] PipeClient# [5:2167:37] 2026-01-07T17:23:27.746177Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2168:37] Status# ERROR ClientId# [6:2168:37] ServerId# [0:0:0] PipeClient# [6:2168:37] 2026-01-07T17:23:27.746214Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2169:37] Status# ERROR ClientId# [7:2169:37] ServerId# [0:0:0] PipeClient# [7:2169:37] 2026-01-07T17:23:27.746250Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2170:37] Status# ERROR ClientId# [8:2170:37] ServerId# [0:0:0] 
PipeClient# [8:2170:37] 2026-01-07T17:23:27.746298Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2171:37] Status# ERROR ClientId# [9:2171:37] ServerId# [0:0:0] PipeClient# [9:2171:37] 2026-01-07T17:23:27.746339Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2172:37] Status# ERROR ClientId# [10:2172:37] ServerId# [0:0:0] PipeClient# [10:2172:37] 2026-01-07T17:23:27.746382Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2173:37] Status# ERROR ClientId# [11:2173:37] ServerId# [0:0:0] PipeClient# [11:2173:37] 2026-01-07T17:23:27.746423Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2174:37] Status# ERROR ClientId# [12:2174:37] ServerId# [0:0:0] PipeClient# [12:2174:37] 2026-01-07T17:23:27.746479Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2175:37] Status# ERROR ClientId# [13:2175:37] ServerId# [0:0:0] PipeClient# [13:2175:37] 2026-01-07T17:23:27.746531Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2176:37] Status# ERROR ClientId# [14:2176:37] ServerId# [0:0:0] PipeClient# [14:2176:37] 2026-01-07T17:23:27.746570Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2177:37] Status# ERROR ClientId# [15:2177:37] ServerId# [0:0:0] PipeClient# [15:2177:37] 2026-01-07T17:23:27.746606Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2178:37] Status# ERROR ClientId# [16:2178:37] ServerId# [0:0:0] PipeClient# [16:2178:37] 2026-01-07T17:23:27.746644Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2179:37] Status# ERROR ClientId# [17:2179:37] ServerId# [0:0:0] PipeClient# [17:2179:37] 2026-01-07T17:23:27.746692Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2180:37] Status# ERROR ClientId# [18:2180:37] ServerId# [0:0:0] PipeClient# [18:2180:37] 2026-01-07T17:23:27.746736Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2181:37] Status# ERROR ClientId# [19:2181:37] ServerId# [0:0:0] PipeClient# [19:2181:37] 2026-01-07T17:23:27.746772Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2182:37] Status# ERROR ClientId# [20:2182:37] ServerId# [0:0:0] PipeClient# [20:2182:37] 2026-01-07T17:23:27.746813Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2183:37] Status# ERROR ClientId# [21:2183:37] ServerId# [0:0:0] PipeClient# [21:2183:37] 2026-01-07T17:23:27.746855Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2184:37] Status# ERROR ClientId# [22:2184:37] ServerId# [0:0:0] PipeClient# [22:2184:37] 2026-01-07T17:23:27.746890Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2185:37] Status# ERROR ClientId# [23:2185:37] ServerId# [0:0:0] PipeClient# [23:2185:37] 2026-01-07T17:23:27.746922Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2186:37] Status# ERROR ClientId# [24:2186:37] ServerId# [0:0:0] PipeClient# [24:2186:37] 2026-01-07T17:23:27.746954Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2187:37] Status# ERROR ClientId# [25:2187:37] ServerId# [0:0:0] PipeClient# [25:2187:37] 2026-01-07T17:23:27.747017Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2188:37] Status# ERROR ClientId# [26:2188:37] ServerId# [0:0:0] PipeClient# [26:2188:37] 2026-01-07T17:23:27.747067Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2189:37] Status# ERROR ClientId# [27:2189:37] ServerId# [0:0:0] PipeClient# [27:2189:37] 
2026-01-07T17:23:27.747108Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2190:37] Status# ERROR ClientId# [28:2190:37] ServerId# [0:0:0] PipeClient# [28:2190:37] 2026-01-07T17:23:27.747145Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2191:37] Status# ERROR ClientId# [29:2191:37] ServerId# [0:0:0] PipeClient# [29:2191:37] 2026-01-07T17:23:27.747195Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2192:37] Status# ERROR ClientId# [30:2192:37] ServerId# [0:0:0] PipeClient# [30:2192:37] 2026-01-07T17:23:27.747227Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2193:37] Status# ERROR ClientId# [31:2193:37] ServerId# [0:0:0] PipeClient# [31:2193:37] 2026-01-07T17:23:27.747261Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2194:37] Status# ERROR ClientId# [32:2194:37] ServerId# [0:0:0] PipeClient# [32:2194:37] 2026-01-07T17:23:27.871794Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2264:79] expected 1 current 0 2026-01-07T17:23:27.871865Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2265:38] expected 1 current 0 2026-01-07T17:23:27.871895Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2266:38] expected 1 current 0 2026-01-07T17:23:27.871922Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2267:38] expected 1 current 0 2026-01-07T17:23:27.871948Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2268:38] expected 1 current 0 2026-01-07T17:23:27.871975Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2269:38] expected 1 current 0 2026-01-07T17:23:27.872002Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7:2270:38] expected 1 current 0 2026-01-07T17:23:27.872028Z 8 00h00m00.002560s :BS_NODE DEBUG: [8] CheckState from [8:2271:38] expected 1 current 0 2026-01-07T17:23:27.872056Z 9 00h00m00.002560s :BS_NODE DEBUG: [9] CheckState from [9:2272:38] expected 1 current 0 2026-01-07T17:23:27.872094Z 10 00h00m00.002560s :BS_NODE DEBUG: [10] CheckState from [10:2273 ... 
Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483670 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.652938Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483670 VDiskId# [80000016:2:0:5:0] DiskIsOk# true 2026-01-07T17:23:37.652959Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483670 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.652977Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483670 VDiskId# [80000016:2:0:7:0] DiskIsOk# true 2026-01-07T17:23:37.655435Z 1 05h15m00.121504s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483670 Items# [80000016:2:0:6:0]: 23:1001:1002 -> 25:1000:1013 ConfigTxSeqNo# 509 2026-01-07T17:23:37.655463Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483670 Success# true 2026-01-07T17:23:37.655553Z 17 05h15m00.121504s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-07T17:23:37.655596Z 17 05h15m00.121504s :BS_NODE DEBUG: [17] VDiskId# [80000016:2:0:0:0] -> [80000016:3:0:0:0] 2026-01-07T17:23:37.655653Z 18 05h15m00.121504s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-07T17:23:37.655685Z 18 05h15m00.121504s :BS_NODE DEBUG: [18] VDiskId# [80000016:2:0:1:0] -> [80000016:3:0:1:0] 2026-01-07T17:23:37.655735Z 19 05h15m00.121504s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2026-01-07T17:23:37.655766Z 19 05h15m00.121504s :BS_NODE DEBUG: [19] VDiskId# [80000016:2:0:2:0] -> [80000016:3:0:2:0] 2026-01-07T17:23:37.655816Z 21 05h15m00.121504s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-07T17:23:37.655847Z 21 05h15m00.121504s :BS_NODE DEBUG: [21] VDiskId# [80000016:2:0:4:0] -> [80000016:3:0:4:0] 2026-01-07T17:23:37.655893Z 22 05h15m00.121504s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2026-01-07T17:23:37.655921Z 22 05h15m00.121504s :BS_NODE DEBUG: [22] VDiskId# [80000016:2:0:5:0] -> [80000016:3:0:5:0] 2026-01-07T17:23:37.655963Z 23 05h15m00.121504s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.656032Z 24 05h15m00.121504s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-01-07T17:23:37.656074Z 24 05h15m00.121504s :BS_NODE DEBUG: [24] VDiskId# [80000016:2:0:7:0] -> [80000016:3:0:7:0] 2026-01-07T17:23:37.656151Z 25 05h15m00.121504s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2026-01-07T17:23:37.656189Z 25 05h15m00.121504s :BS_NODE DEBUG: [25] VDiskId# [80000016:3:0:6:0] PDiskId# 1000 VSlotId# 1013 created 2026-01-07T17:23:37.656251Z 25 05h15m00.121504s :BS_NODE DEBUG: [25] VDiskId# [80000016:3:0:6:0] status changed to INIT_PENDING 2026-01-07T17:23:37.656333Z 11 05h15m00.121504s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2026-01-07T17:23:37.656367Z 11 05h15m00.121504s :BS_NODE DEBUG: [11] VDiskId# [80000016:2:0:3:0] -> [80000016:3:0:3:0] 2026-01-07T17:23:37.656549Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483654 2026-01-07T17:23:37.657012Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657040Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:0:0] DiskIsOk# true 2026-01-07T17:23:37.657066Z 1 05h15m00.121504s :BS_SELFHEAL 
DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657102Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:1:0] DiskIsOk# true 2026-01-07T17:23:37.657122Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657140Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:2:0] DiskIsOk# true 2026-01-07T17:23:37.657159Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657176Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:3:0] DiskIsOk# true 2026-01-07T17:23:37.657192Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657209Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:4:0] DiskIsOk# true 2026-01-07T17:23:37.657226Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657241Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:5:0] DiskIsOk# true 2026-01-07T17:23:37.657257Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:37.657274Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:2:0:7:0] DiskIsOk# true 2026-01-07T17:23:37.659696Z 1 05h15m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483654 Items# [80000006:2:0:6:0]: 23:1001:1000 -> 32:1000:1013 ConfigTxSeqNo# 510 2026-01-07T17:23:37.659724Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483654 Success# true 2026-01-07T17:23:37.659817Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-07T17:23:37.659856Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] VDiskId# [80000006:2:0:0:0] -> [80000006:3:0:0:0] 2026-01-07T17:23:37.659912Z 18 05h15m00.122016s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-07T17:23:37.659942Z 18 05h15m00.122016s :BS_NODE DEBUG: [18] VDiskId# [80000006:2:0:1:0] -> [80000006:3:0:1:0] 2026-01-07T17:23:37.659992Z 19 05h15m00.122016s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2026-01-07T17:23:37.660020Z 19 05h15m00.122016s :BS_NODE DEBUG: [19] VDiskId# [80000006:2:0:2:0] -> [80000006:3:0:2:0] 2026-01-07T17:23:37.660070Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-07T17:23:37.660101Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] VDiskId# [80000006:2:0:4:0] -> [80000006:3:0:4:0] 2026-01-07T17:23:37.660148Z 22 05h15m00.122016s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2026-01-07T17:23:37.660179Z 22 05h15m00.122016s :BS_NODE DEBUG: [22] VDiskId# 
[80000006:2:0:5:0] -> [80000006:3:0:5:0] 2026-01-07T17:23:37.660220Z 23 05h15m00.122016s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.660265Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-01-07T17:23:37.660293Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] VDiskId# [80000006:2:0:7:0] -> [80000006:3:0:7:0] 2026-01-07T17:23:37.660342Z 11 05h15m00.122016s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2026-01-07T17:23:37.660372Z 11 05h15m00.122016s :BS_NODE DEBUG: [11] VDiskId# [80000006:2:0:3:0] -> [80000006:3:0:3:0] 2026-01-07T17:23:37.660443Z 32 05h15m00.122016s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2026-01-07T17:23:37.660473Z 32 05h15m00.122016s :BS_NODE DEBUG: [32] VDiskId# [80000006:3:0:6:0] PDiskId# 1000 VSlotId# 1013 created 2026-01-07T17:23:37.660516Z 32 05h15m00.122016s :BS_NODE DEBUG: [32] VDiskId# [80000006:3:0:6:0] status changed to INIT_PENDING 2026-01-07T17:23:37.661194Z 25 05h15m01.332504s :BS_NODE DEBUG: [25] VDiskId# [80000016:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.661436Z 32 05h15m01.363968s :BS_NODE DEBUG: [32] VDiskId# [80000011:6:0:1:0] status changed to REPLICATING 2026-01-07T17:23:37.661746Z 32 05h15m01.669016s :BS_NODE DEBUG: [32] VDiskId# [80000006:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.662030Z 32 05h15m01.848944s :BS_NODE DEBUG: [32] VDiskId# [8000001e:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.662390Z 25 05h15m02.258480s :BS_NODE DEBUG: [25] VDiskId# [80000036:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.662645Z 25 05h15m03.774432s :BS_NODE DEBUG: [25] VDiskId# [8000002e:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.662982Z 25 05h15m04.300920s :BS_NODE DEBUG: [25] VDiskId# [8000003e:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.663320Z 32 05h15m04.861992s :BS_NODE DEBUG: [32] VDiskId# [80000026:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.664195Z 25 05h15m05.511456s :BS_NODE DEBUG: [25] VDiskId# [8000000e:3:0:6:0] status changed to REPLICATING 2026-01-07T17:23:37.664612Z 25 05h15m09.869920s :BS_NODE DEBUG: [25] VDiskId# [8000003e:3:0:6:0] status changed to READY 2026-01-07T17:23:37.665371Z 23 05h15m09.870432s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.665412Z 23 05h15m09.870432s :BS_NODE DEBUG: [23] VDiskId# [8000003e:2:0:6:0] destroyed 2026-01-07T17:23:37.665687Z 32 05h15m14.845992s :BS_NODE DEBUG: [32] VDiskId# [80000026:3:0:6:0] status changed to READY 2026-01-07T17:23:37.666286Z 23 05h15m14.846504s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.666320Z 23 05h15m14.846504s :BS_NODE DEBUG: [23] VDiskId# [80000026:2:0:6:0] destroyed 2026-01-07T17:23:37.666710Z 32 05h15m16.954016s :BS_NODE DEBUG: [32] VDiskId# [80000006:3:0:6:0] status changed to READY 2026-01-07T17:23:37.667308Z 23 05h15m16.954528s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.667344Z 23 05h15m16.954528s :BS_NODE DEBUG: [23] VDiskId# [80000006:2:0:6:0] destroyed 2026-01-07T17:23:37.667450Z 32 05h15m17.056944s :BS_NODE DEBUG: [32] VDiskId# [8000001e:3:0:6:0] status changed to READY 2026-01-07T17:23:37.668028Z 23 05h15m17.057456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.668064Z 23 05h15m17.057456s :BS_NODE DEBUG: [23] VDiskId# [8000001e:2:0:6:0] destroyed 2026-01-07T17:23:37.668299Z 25 05h15m20.243456s :BS_NODE DEBUG: [25] VDiskId# [8000000e:3:0:6:0] status changed to READY 2026-01-07T17:23:37.668886Z 23 05h15m20.243968s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.668918Z 
23 05h15m20.243968s :BS_NODE DEBUG: [23] VDiskId# [8000000e:2:0:6:0] destroyed 2026-01-07T17:23:37.669023Z 25 05h15m21.124432s :BS_NODE DEBUG: [25] VDiskId# [8000002e:3:0:6:0] status changed to READY 2026-01-07T17:23:37.669590Z 23 05h15m21.124944s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.669633Z 23 05h15m21.124944s :BS_NODE DEBUG: [23] VDiskId# [8000002e:2:0:6:0] destroyed 2026-01-07T17:23:37.669974Z 25 05h15m24.876480s :BS_NODE DEBUG: [25] VDiskId# [80000036:3:0:6:0] status changed to READY 2026-01-07T17:23:37.670579Z 23 05h15m24.876992s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.670611Z 23 05h15m24.876992s :BS_NODE DEBUG: [23] VDiskId# [80000036:2:0:6:0] destroyed 2026-01-07T17:23:37.671614Z 32 05h15m33.031968s :BS_NODE DEBUG: [32] VDiskId# [80000011:6:0:1:0] status changed to READY 2026-01-07T17:23:37.672192Z 23 05h15m33.032480s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.672227Z 23 05h15m33.032480s :BS_NODE DEBUG: [23] VDiskId# [80000011:5:0:1:0] destroyed 2026-01-07T17:23:37.672508Z 25 05h15m35.215504s :BS_NODE DEBUG: [25] VDiskId# [80000016:3:0:6:0] status changed to READY 2026-01-07T17:23:37.673136Z 23 05h15m35.216016s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:37.673184Z 23 05h15m35.216016s :BS_NODE DEBUG: [23] VDiskId# [80000016:2:0:6:0] destroyed |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 14442210884973697334 2026-01-07T17:23:41.838411Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838514Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838555Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838628Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838670Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838713Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.838752Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: 
(2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.840127Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840215Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840269Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840326Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840381Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840437Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840486Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.840569Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.840709Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.840775Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.840865Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.841042Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.841110Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.841182Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.843242Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843325Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843388Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843458Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843517Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843588Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.843652Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 2350100387795362075 2026-01-07T17:23:41.879039Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879157Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879215Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879257Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879303Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879364Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879408Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.879445Z 8 00h00m30.010512s :BS_VDISK_OTHER 
ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.880840Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.880948Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881007Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881082Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881155Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881226Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881281Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881335Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.881407Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881488Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881547Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881600Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881636Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881678Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881715Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 
VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.881753Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:41.883893Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884004Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884061Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884126Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884175Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884238Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884299Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:41.884375Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] Test command err: RandomSeed# 13350003509570254041 2026-01-07T17:23:36.937275Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) 
TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:36.938377Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7771089670456549769] 2026-01-07T17:23:37.242686Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:37.482033Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:37.483742Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8638724876354180033] 2026-01-07T17:23:37.499531Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:37.573261Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:37.574985Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15968438215435060210] 2026-01-07T17:23:37.585997Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:37.723261Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:37.724668Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11688486457334892507] 2026-01-07T17:23:37.742019Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:38.810931Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:38.812348Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2640917792974762328] 2026-01-07T17:23:38.840687Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:38.990093Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:38.991551Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14185915192542115667] 2026-01-07T17:23:39.214148Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:39.366300Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:39.367811Z 5 
00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4140633187545031171] 2026-01-07T17:23:39.377923Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:39.486560Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:39.488041Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3850886892904744997] 2026-01-07T17:23:39.498222Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:40.011263Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:40.012751Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14094951953791809317] 2026-01-07T17:23:40.021670Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:40.239209Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:40.240620Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10677149730266889365] 2026-01-07T17:23:40.393923Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:40.499273Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:40.500698Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9727853470936067408] 2026-01-07T17:23:40.510074Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:40.814417Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:40.815938Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12577192058455373416] 2026-01-07T17:23:40.928888Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:41.516451Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:41.517903Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) 
TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2893839215286933138] 2026-01-07T17:23:41.525275Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:41.746702Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:41.748513Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14665132866313780454] 2026-01-07T17:23:41.772180Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:41.863418Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:41.865225Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14275194376983091924] 2026-01-07T17:23:41.877630Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-07T17:23:42.006982Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:42.008684Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5304286850244690368] 2026-01-07T17:23:42.021622Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.3%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag >> 
test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |81.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |81.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> BlobDepot::VerifiedRandom [GOOD] >> BlobDepot::LoadPutAndRead >> BSCReadOnlyPDisk::ReadOnlySlay |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> BSCReadOnlyPDisk::ReadOnlyOneByOne >> TStateStorageRingGroupState::TestStateStorageUpdateSig >> TStateStorageRingGroupState::TestStateStorageDoubleReply >> TStateStorage2RingGroups::TestStateStorageReplyOnce >> TStateStorageRingGroupState::TestBoardConfigMismatch >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 >> TStateStorageRingGroupState::TestProxyConfigMismatch >> VDiskRestart::Simple [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2026-01-07T17:23:27.567930Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-07T17:23:27.567978Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-07T17:23:27.568051Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-07T17:23:27.568080Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-07T17:23:27.568119Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-07T17:23:27.568137Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-07T17:23:27.568170Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-07T17:23:27.568189Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-07T17:23:27.568218Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-07T17:23:27.568238Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-07T17:23:27.568268Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-07T17:23:27.568288Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-07T17:23:27.568323Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-07T17:23:27.568360Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-07T17:23:27.568405Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-07T17:23:27.568430Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-07T17:23:27.568479Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-07T17:23:27.568499Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-07T17:23:27.568583Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-07T17:23:27.568605Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-07T17:23:27.568635Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-07T17:23:27.568655Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-07T17:23:27.568684Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-07T17:23:27.568702Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-07T17:23:27.568748Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-07T17:23:27.568770Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-07T17:23:27.568804Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-07T17:23:27.568823Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-07T17:23:27.568861Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-07T17:23:27.568890Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-07T17:23:27.568942Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-07T17:23:27.568964Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-07T17:23:27.568997Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-07T17:23:27.569014Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-07T17:23:27.569041Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-07T17:23:27.569061Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-07T17:23:27.569094Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-07T17:23:27.569112Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-07T17:23:27.569143Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-07T17:23:27.569162Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-07T17:23:27.569199Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-07T17:23:27.569219Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-07T17:23:27.569251Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 
2026-01-07T17:23:27.569271Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-07T17:23:27.569299Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-07T17:23:27.569329Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-07T17:23:27.569376Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-07T17:23:27.569394Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-07T17:23:27.569423Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-07T17:23:27.569444Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-07T17:23:27.569487Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-07T17:23:27.569508Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-07T17:23:27.569538Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-07T17:23:27.569559Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-07T17:23:27.569597Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-07T17:23:27.569615Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-07T17:23:27.569647Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-07T17:23:27.569666Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-07T17:23:27.569706Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-07T17:23:27.569725Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-07T17:23:27.569759Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-07T17:23:27.569788Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-07T17:23:27.569826Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-07T17:23:27.569844Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-07T17:23:27.569870Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-07T17:23:27.569888Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-07T17:23:27.569920Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-07T17:23:27.569952Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-07T17:23:27.569992Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-07T17:23:27.570010Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-07T17:23:27.570039Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-07T17:23:27.570062Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-07T17:23:27.587096Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-07T17:23:27.588341Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-07T17:23:27.588396Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-01-07T17:23:27.588432Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-07T17:23:27.588470Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-07T17:23:27.588509Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-07T17:23:27.588544Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 
2026-01-07T17:23:27.588580Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-07T17:23:27.588621Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-01-07T17:23:27.588657Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-07T17:23:27.588693Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-07T17:23:27.588731Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-07T17:23:27.588775Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-07T17:23:27.588841Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-07T17:23:27.588889Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-07T17:23:27.588945Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-07T17:23:27.588983Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-07T17:23:27.589036Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-07T17:23:27.589078Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-07T17:23:27.589113Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-07T17:23:27.589148Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-07T17:23:27.589201Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-01-07T17:23:27.589244Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-07T17:23:27.589284Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-07T17:23:27.589324Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-07T17:23:27.589364Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-07T17:23:27.589412Z 27 
00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-07T17:23:27.589452Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-01-07T17:23:27.589500Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-07T17:23:27.589554Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-07T17:23:27.589603Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-07T17:23:27.589642Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-07T17:23:27.589676Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-07T17:23:27.589715Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-07T17:23:27.589753Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... FHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483671 Items# [80000017:3:2:2:0]: 35:1001:1001 -> 32:1001:1009 ConfigTxSeqNo# 541 2026-01-07T17:23:45.058420Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483671 Success# true 2026-01-07T17:23:45.058616Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-07T17:23:45.058695Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] VDiskId# [80000017:3:1:0:0] -> [80000017:4:1:0:0] 2026-01-07T17:23:45.058791Z 35 05h45m00.123040s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.058880Z 20 05h45m00.123040s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2026-01-07T17:23:45.058932Z 20 05h45m00.123040s :BS_NODE DEBUG: [20] VDiskId# [80000017:3:1:1:0] -> [80000017:4:1:1:0] 2026-01-07T17:23:45.059034Z 6 05h45m00.123040s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-07T17:23:45.059087Z 6 05h45m00.123040s :BS_NODE DEBUG: [6] VDiskId# [80000017:3:0:0:0] -> [80000017:4:0:0:0] 2026-01-07T17:23:45.059179Z 23 05h45m00.123040s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:45.059223Z 23 05h45m00.123040s :BS_NODE DEBUG: [23] VDiskId# [80000017:3:1:2:0] -> [80000017:4:1:2:0] 2026-01-07T17:23:45.059320Z 8 05h45m00.123040s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-07T17:23:45.059385Z 8 05h45m00.123040s :BS_NODE DEBUG: [8] VDiskId# [80000017:3:0:1:0] -> [80000017:4:0:1:0] 2026-01-07T17:23:45.059470Z 26 05h45m00.123040s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2026-01-07T17:23:45.059525Z 26 05h45m00.123040s :BS_NODE DEBUG: [26] VDiskId# [80000017:3:2:1:0] -> [80000017:4:2:1:0] 2026-01-07T17:23:45.059616Z 11 05h45m00.123040s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2026-01-07T17:23:45.059678Z 11 05h45m00.123040s :BS_NODE DEBUG: [11] VDiskId# [80000017:3:0:2:0] -> [80000017:4:0:2:0] 2026-01-07T17:23:45.059785Z 29 05h45m00.123040s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 
2026-01-07T17:23:45.059834Z 29 05h45m00.123040s :BS_NODE DEBUG: [29] VDiskId# [80000017:3:2:0:0] -> [80000017:4:2:0:0] 2026-01-07T17:23:45.059926Z 32 05h45m00.123040s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2026-01-07T17:23:45.059968Z 32 05h45m00.123040s :BS_NODE DEBUG: [32] VDiskId# [80000017:4:2:2:0] PDiskId# 1001 VSlotId# 1009 created 2026-01-07T17:23:45.060060Z 32 05h45m00.123040s :BS_NODE DEBUG: [32] VDiskId# [80000017:4:2:2:0] status changed to INIT_PENDING 2026-01-07T17:23:45.060445Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483655 2026-01-07T17:23:45.061351Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061424Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:0:0:0] DiskIsOk# true 2026-01-07T17:23:45.061471Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061504Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:0:1:0] DiskIsOk# true 2026-01-07T17:23:45.061547Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061585Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:0:2:0] DiskIsOk# true 2026-01-07T17:23:45.061620Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061650Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:1:0:0] DiskIsOk# true 2026-01-07T17:23:45.061681Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061711Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:1:1:0] DiskIsOk# true 2026-01-07T17:23:45.061768Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061816Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:1:2:0] DiskIsOk# true 2026-01-07T17:23:45.061855Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061884Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:3:2:0:0] DiskIsOk# true 2026-01-07T17:23:45.061924Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2026-01-07T17:23:45.061961Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 
VDiskId# [80000007:3:2:1:0] DiskIsOk# true 2026-01-07T17:23:45.068755Z 1 05h45m00.123552s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483655 Items# [80000007:3:2:2:0]: 35:1001:1000 -> 26:1001:1009 ConfigTxSeqNo# 542 2026-01-07T17:23:45.068815Z 1 05h45m00.123552s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483655 Success# true 2026-01-07T17:23:45.069038Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-07T17:23:45.069120Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] VDiskId# [80000007:3:1:0:0] -> [80000007:4:1:0:0] 2026-01-07T17:23:45.069239Z 35 05h45m00.123552s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.069344Z 20 05h45m00.123552s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2026-01-07T17:23:45.069393Z 20 05h45m00.123552s :BS_NODE DEBUG: [20] VDiskId# [80000007:3:1:1:0] -> [80000007:4:1:1:0] 2026-01-07T17:23:45.069490Z 3 05h45m00.123552s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-07T17:23:45.069541Z 3 05h45m00.123552s :BS_NODE DEBUG: [3] VDiskId# [80000007:3:0:0:0] -> [80000007:4:0:0:0] 2026-01-07T17:23:45.069634Z 23 05h45m00.123552s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-07T17:23:45.069682Z 23 05h45m00.123552s :BS_NODE DEBUG: [23] VDiskId# [80000007:3:1:2:0] -> [80000007:4:1:2:0] 2026-01-07T17:23:45.069778Z 8 05h45m00.123552s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-07T17:23:45.069830Z 8 05h45m00.123552s :BS_NODE DEBUG: [8] VDiskId# [80000007:3:0:1:0] -> [80000007:4:0:1:0] 2026-01-07T17:23:45.069925Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2026-01-07T17:23:45.069970Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] VDiskId# [80000007:4:2:2:0] PDiskId# 1001 VSlotId# 1009 created 2026-01-07T17:23:45.070055Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] VDiskId# [80000007:4:2:2:0] status changed to INIT_PENDING 2026-01-07T17:23:45.070180Z 11 05h45m00.123552s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2026-01-07T17:23:45.070241Z 11 05h45m00.123552s :BS_NODE DEBUG: [11] VDiskId# [80000007:3:0:2:0] -> [80000007:4:0:2:0] 2026-01-07T17:23:45.070348Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2026-01-07T17:23:45.070398Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] VDiskId# [80000007:3:2:0:0] -> [80000007:4:2:0:0] 2026-01-07T17:23:45.070500Z 32 05h45m00.123552s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2026-01-07T17:23:45.070551Z 32 05h45m00.123552s :BS_NODE DEBUG: [32] VDiskId# [80000007:3:2:1:0] -> [80000007:4:2:1:0] 2026-01-07T17:23:45.071850Z 36 05h45m01.715504s :BS_NODE DEBUG: [36] VDiskId# [80000047:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.072334Z 26 05h45m02.144992s :BS_NODE DEBUG: [26] VDiskId# [80000057:5:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.072913Z 32 05h45m02.619040s :BS_NODE DEBUG: [32] VDiskId# [80000017:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.073387Z 32 05h45m02.719016s :BS_NODE DEBUG: [32] VDiskId# [80000037:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.073905Z 25 05h45m03.360480s :BS_NODE DEBUG: [25] VDiskId# [80000067:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.074271Z 27 05h45m03.681528s :BS_NODE DEBUG: [27] VDiskId# [80000027:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.075955Z 26 05h45m05.294552s :BS_NODE DEBUG: [26] VDiskId# [80000007:4:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.076550Z 35 05h45m06.024456s :BS_NODE DEBUG: [35] VDiskId# [8000001b:3:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.077498Z 36 
05h45m06.097968s :BS_NODE DEBUG: [36] VDiskId# [80000077:5:2:2:0] status changed to REPLICATING 2026-01-07T17:23:45.078008Z 26 05h45m08.751992s :BS_NODE DEBUG: [26] VDiskId# [80000057:5:2:2:0] status changed to READY 2026-01-07T17:23:45.079301Z 35 05h45m08.752504s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.079381Z 35 05h45m08.752504s :BS_NODE DEBUG: [35] VDiskId# [80000057:4:2:2:0] destroyed 2026-01-07T17:23:45.080597Z 36 05h45m15.980968s :BS_NODE DEBUG: [36] VDiskId# [80000077:5:2:2:0] status changed to READY 2026-01-07T17:23:45.081706Z 35 05h45m15.981480s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.081772Z 35 05h45m15.981480s :BS_NODE DEBUG: [35] VDiskId# [80000077:4:2:2:0] destroyed 2026-01-07T17:23:45.081972Z 32 05h45m18.547016s :BS_NODE DEBUG: [32] VDiskId# [80000037:4:2:2:0] status changed to READY 2026-01-07T17:23:45.083049Z 35 05h45m18.547528s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.083109Z 35 05h45m18.547528s :BS_NODE DEBUG: [35] VDiskId# [80000037:3:2:2:0] destroyed 2026-01-07T17:23:45.083646Z 27 05h45m21.350528s :BS_NODE DEBUG: [27] VDiskId# [80000027:4:2:2:0] status changed to READY 2026-01-07T17:23:45.084590Z 35 05h45m21.351040s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.084646Z 35 05h45m21.351040s :BS_NODE DEBUG: [35] VDiskId# [80000027:3:2:2:0] destroyed 2026-01-07T17:23:45.085437Z 35 05h45m27.969456s :BS_NODE DEBUG: [35] VDiskId# [8000001b:3:2:2:0] status changed to READY 2026-01-07T17:23:45.087015Z 35 05h45m27.969968s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.087069Z 35 05h45m27.969968s :BS_NODE DEBUG: [35] VDiskId# [8000001b:2:2:2:0] destroyed 2026-01-07T17:23:45.088014Z 32 05h45m30.617040s :BS_NODE DEBUG: [32] VDiskId# [80000017:4:2:2:0] status changed to READY 2026-01-07T17:23:45.089153Z 35 05h45m30.617552s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.089210Z 35 05h45m30.617552s :BS_NODE DEBUG: [35] VDiskId# [80000017:3:2:2:0] destroyed 2026-01-07T17:23:45.089696Z 36 05h45m31.631504s :BS_NODE DEBUG: [36] VDiskId# [80000047:4:2:2:0] status changed to READY 2026-01-07T17:23:45.090681Z 35 05h45m31.632016s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.090734Z 35 05h45m31.632016s :BS_NODE DEBUG: [35] VDiskId# [80000047:3:2:2:0] destroyed 2026-01-07T17:23:45.091198Z 25 05h45m33.695480s :BS_NODE DEBUG: [25] VDiskId# [80000067:4:2:2:0] status changed to READY 2026-01-07T17:23:45.092067Z 35 05h45m33.695992s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.092147Z 35 05h45m33.695992s :BS_NODE DEBUG: [35] VDiskId# [80000067:3:2:2:0] destroyed 2026-01-07T17:23:45.093109Z 26 05h45m39.335552s :BS_NODE DEBUG: [26] VDiskId# [80000007:4:2:2:0] status changed to READY 2026-01-07T17:23:45.094127Z 35 05h45m39.336064s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-07T17:23:45.094176Z 35 05h45m39.336064s :BS_NODE DEBUG: [35] VDiskId# [80000007:3:2:2:0] destroyed |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> 
BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 3209604290920408973 2026-01-07T17:23:47.610327Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:145:6] SessionId# [0:0:0] Cookie# 7 2026-01-07T17:23:47.610382Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 7 SessionId# [1:145:6] Cookie# 7 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:47.610432Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {7.0/6748696606390105601@[0:0:0]} 2026-01-07T17:23:47.610499Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [6:255:20] SessionId# [3:94:5] Cookie# 1580830067998968395 2026-01-07T17:23:47.610529Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [3:94:5] Inserted# false Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:47.616040Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 6 Cookie# 1580830067998968395 SessionId# [3:94:5] Binding# {2.0/11924254461575627754@[3:109:2]} Record# {BoundNodes { NodeId { Host: "127.0.0.5" Port: 19001 NodeId: 5 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 15543597748649273433 2026-01-07T17:23:47.836828Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [4:112:2] SessionId# [0:0:0] Cookie# 6 2026-01-07T17:23:47.836890Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 2 SessionId# [4:112:2] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:47.836977Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {2.0/16520084041831533167@[0:0:0]} 2026-01-07T17:23:47.837059Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:130:1] SessionId# [0:0:0] Cookie# 8 2026-01-07T17:23:47.837088Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 2 SessionId# [1:130:1] Cookie# 8 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:47.837304Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [2:131:1] Cookie# 12105057953159572186 2026-01-07T17:23:47.837357Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 
SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:47.845021Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 12105057953159572186 SessionId# [2:131:1] Binding# {1.0/12105057953159572186@[2:131:1]} Record# {RootNodeId: 4 } 2026-01-07T17:23:47.845126Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007828s 2026-01-07T17:23:47.845189Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [4:137:1] Cookie# 6041751168459708256 2026-01-07T17:23:47.845246Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-07T17:23:47.845450Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 6041751168459708256 SessionId# [4:137:1] Binding# {2.0/16520084041831533167@[4:112:2]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 10006561492918373725 2026-01-07T17:23:48.061952Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [9:276:20] SessionId# [3:103:8] Cookie# 16032714100541946071 2026-01-07T17:23:48.061996Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 9 SessionId# [3:103:8] Inserted# false Subscription# {SessionId# [3:103:8] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.068624Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 9 Cookie# 16032714100541946071 SessionId# [3:103:8] Binding# {2.1/15510862981109243961@[3:109:2]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] Test command err: RandomSeed# 8170485921505207813 2026-01-07T17:23:48.034839Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [1:139:4] Cookie# 625393733710004590 2026-01-07T17:23:48.034884Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [1:139:4] Inserted# false Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.039388Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 625393733710004590 SessionId# [1:139:4] Binding# {5.3/625393733710004590@[1:139:4]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.039472Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {5.3/625393733710004590@[1:139:4]} Reason# binding cycle 2026-01-07T17:23:48.039566Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {5.3/625393733710004590@[1:139:4]} 2026-01-07T17:23:48.039648Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 5 Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} 2026-01-07T17:23:48.039743Z 8 00h00m00.000000s 
:BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [8:61:5] Cookie# 12164752475678072339 2026-01-07T17:23:48.039881Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [8:61:5] Inserted# false Subscription# {SessionId# [8:61:5] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.039925Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 12164752475678072339 SessionId# [8:61:5] Binding# {5.3/12164752475678072339@[8:61:5]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.039990Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 10679485895409583796 2026-01-07T17:23:48.040011Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-07T17:23:48.040277Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 5 Cookie# 10679485895409583796 SessionId# [3:91:4] Binding# Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.041253Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.041276Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.041380Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.041396Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.041411Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.041474Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.3:19001/3 2026-01-07T17:23:48.041670Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.041775Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 5 Binding# {5.0/625393733710004591@[0:0:0]} SessionId# [0:0:0] 2026-01-07T17:23:48.041918Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [1:139:4] Cookie# 625393733710004590 2026-01-07T17:23:48.041940Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [1:139:4] Inserted# false Subscription# {SessionId# [0:0:0] 
SubscriptionCookie# 5} NextSubscribeCookie# 6 2026-01-07T17:23:48.041970Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 625393733710004590 SessionId# [1:139:4] Binding# {5.0/625393733710004591@[0:0:0]} Record# {RootNodeId: 3 } 2026-01-07T17:23:48.042037Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [8:61:5] Cookie# 12164752475678072339 2026-01-07T17:23:48.042054Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [8:61:5] Inserted# false Subscription# {SessionId# [8:61:5] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.042141Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 12164752475678072339 SessionId# [8:61:5] Binding# {5.1/12164752475678072339@[8:61:5]} Record# {RootNodeId: 3 } 2026-01-07T17:23:48.042669Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:139:4] SessionId# [0:0:0] Cookie# 5 2026-01-07T17:23:48.042738Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 5 SessionId# [1:139:4] Cookie# 5 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:48.042804Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {5.0/625393733710004591@[0:0:0]} 2026-01-07T17:23:48.042854Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 625393733710004590 2026-01-07T17:23:48.042881Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.042914Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 625393733710004590 SessionId# [5:140:1] Binding# {3.3/10679485895409583796@[5:92:3]} 2026-01-07T17:23:48.042936Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-01-07T17:23:48.042961Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2026-01-07T17:23:48.042993Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.043013Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.043028Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-01-07T17:23:48.043042Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.043055Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.043068Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-01-07T17:23:48.043080Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.043112Z 5 00h00m00.000000s 
:BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-01-07T17:23:48.043144Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} 2026-01-07T17:23:48.043180Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 3165999804364634482 2026-01-07T17:23:48.043199Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.043246Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 3165999804364634482 SessionId# [2:131:1] Binding# {1.3/3165999804364634482@[2:131:1]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.048415Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 625393733710004591 2026-01-07T17:23:48.048563Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 625393733710004591 SessionId# [5:140:1] Binding# {3.3/10679485895409583796@[5:92:3]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 clusterStateGeneration=10 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-01-07T17:23:48.150545Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 10 ClusterStateGuid: 0 2026-01-07T17:23:48.157895Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.157998Z 1 00h00m22.006726s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.158045Z 1 00h00m22.006726s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.158078Z 1 00h00m22.006726s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.158105Z 1 00h00m22.006726s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.158142Z 1 00h00m22.006726s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.158206Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.158250Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.158277Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.158319Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
2026-01-07T17:23:48.158342Z 1 00h00m22.006726s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.172240Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.172333Z 1 00h00m48.250167s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.172370Z 1 00h00m48.250167s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.172395Z 1 00h00m48.250167s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.172418Z 1 00h00m48.250167s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.172459Z 1 00h00m48.250167s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.172502Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.172545Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.172577Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.172607Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.172640Z 1 00h00m48.250167s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] Test command err: RandomSeed# 5317823528092871241 2026-01-07T17:23:48.056044Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 15668210045010503537 2026-01-07T17:23:48.056106Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.056171Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 15668210045010503537 SessionId# [6:143:1] Binding# {3.1/10719756627200517148@[6:95:3]} 2026-01-07T17:23:48.056219Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-01-07T17:23:48.056256Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 
2026-01-07T17:23:48.056303Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.056334Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-01-07T17:23:48.056373Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.056401Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.056427Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-01-07T17:23:48.056461Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.056500Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-01-07T17:23:48.056563Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} 2026-01-07T17:23:48.056651Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 18269659463880911012 2026-01-07T17:23:48.056684Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.065510Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 18269659463880911012 SessionId# [9:152:1] Binding# {1.6/18269659463880911012@[9:152:1]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.065586Z 9 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008935s 2026-01-07T17:23:48.065644Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 9179434042125290762 2026-01-07T17:23:48.065695Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-07T17:23:48.065751Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 9179434042125290762 SessionId# [2:131:1] Binding# {1.6/9179434042125290762@[2:131:1]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.065796Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 15736481424425549885 2026-01-07T17:23:48.065828Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.065890Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 15736481424425549885 SessionId# [5:140:1] Binding# {1.6/15736481424425549885@[5:140:1]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.066035Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 
268639239 Sender# [3:234:20] SessionId# [7:98:3] Cookie# 915537769404805608 2026-01-07T17:23:48.066085Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.066143Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 915537769404805608 SessionId# [7:98:3] Binding# {3.1/915537769404805608@[7:98:3]} Record# {RootNodeId: 6 } 2026-01-07T17:23:48.066189Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 5097377388798693391 2026-01-07T17:23:48.066226Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.066266Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 5097377388798693391 SessionId# [4:89:3] Binding# {3.1/5097377388798693391@[4:89:3]} Record# {RootNodeId: 6 } 2026-01-07T17:23:48.066304Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [6:95:3] Cookie# 10719756627200517148 2026-01-07T17:23:48.066336Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [6:95:3] Inserted# false Subscription# {SessionId# [6:95:3] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.066405Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 10719756627200517148 SessionId# [6:95:3] Binding# {3.1/10719756627200517148@[6:95:3]} Record# {RootNodeId: 6 } 2026-01-07T17:23:48.066452Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {3.1/10719756627200517148@[6:95:3]} Reason# binding cycle 2026-01-07T17:23:48.066508Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {3.1/10719756627200517148@[6:95:3]} 2026-01-07T17:23:48.066573Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 3 Subscription# {SessionId# [6:95:3] SubscriptionCookie# 0} 2026-01-07T17:23:48.066620Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.066670Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 3 Binding# {3.0/10719756627200517149@[0:0:0]} SessionId# [0:0:0] 2026-01-07T17:23:48.066998Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [6:95:3] SessionId# [0:0:0] Cookie# 6 2026-01-07T17:23:48.067038Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 3 SessionId# [6:95:3] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:48.067098Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {3.0/10719756627200517149@[0:0:0]} 2026-01-07T17:23:48.067162Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [6:255:20] SessionId# [3:94:5] 
Cookie# 10719756627200517148 2026-01-07T17:23:48.067196Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [3:94:5] Inserted# false Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.067405Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 6 Cookie# 10719756627200517148 SessionId# [3:94:5] Binding# {2.6/6516654227776652065@[3:109:2]} Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 6 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.067457Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [3:94:5] Inserted# false Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.067513Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.067562Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.067610Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.4:19001/4 2026-01-07T17:23:48.067637Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.067664Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.067690Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.2:19001/2 2026-01-07T17:23:48.067719Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.067745Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.3:19001/3 2026-01-07T17:23:48.067829Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 6516654227776652065 2026-01-07T17:23:48.067861Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.067935Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 6516654227776652065 SessionId# [3:109:2] Binding# {2.6/6516654227776652065@[3:109:2]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.067990Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [8:61:5] Cookie# 12374172233055316631 2026-01-07T17:23:48.068022Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [8:61:5] Inserted# 
false Subscription# {SessionId# [8:61:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.068069Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 12374172233055316631 SessionId# [8:61:5] Binding# {5.6/12374172233055316631@[8:61:5]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.06894 ... leLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.144231Z 1 00h00m01.585543s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.144264Z 1 00h00m01.585543s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.144319Z 1 00h00m01.585543s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.144347Z 1 00h00m01.585543s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.145944Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.146005Z 1 00h00m03.374732s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.146048Z 1 00h00m03.374732s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.146079Z 1 00h00m03.374732s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.146117Z 1 00h00m03.374732s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.146146Z 1 00h00m03.374732s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.146187Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146222Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146250Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146275Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146301Z 1 00h00m03.374732s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149328Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.149407Z 1 00h00m07.346731s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.149467Z 1 00h00m07.346731s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.149502Z 1 00h00m07.346731s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.149527Z 1 00h00m07.346731s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.149551Z 1 00h00m07.346731s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.149597Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149637Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149670Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149702Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149728Z 1 00h00m07.346731s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.153083Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.153177Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.153225Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.153256Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.153285Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.153312Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.153379Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.153450Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.153479Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.157828Z 1 
00h00m15.529048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.157921Z 1 00h00m15.529048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.157963Z 1 00h00m15.529048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.158005Z 1 00h00m15.529048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.158033Z 1 00h00m15.529048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.158059Z 1 00h00m15.529048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.158115Z 1 00h00m15.529048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.158159Z 1 00h00m15.529048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 2} 2026-01-07T17:23:48.158188Z 1 00h00m15.529048s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-07T17:23:48.158283Z 1 00h00m15.529048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-01-07T17:23:48.162541Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.162652Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.162716Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.162746Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.162775Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.162802Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.162855Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.162905Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.162928Z 1 00h00m20.100000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-07T17:23:48.163024Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} 
TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-01-07T17:23:48.163137Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.163177Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-07T17:23:48.163245Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-07T17:23:48.163360Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72057594037932033} 2026-01-07T17:23:48.163393Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-07T17:23:48.163441Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-07T17:23:48.167661Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: NKikimrStateStorage.TEvCleanup TabletID: 72057594037932033 ProposedLeader { RawX1: 0 RawX2: 0 } ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-07T17:23:48.167724Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-07T17:23:48.167791Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 3106821187634838621 2026-01-07T17:23:48.040318Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [8:44:6] Cookie# 10249109731048581145 2026-01-07T17:23:48.040425Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [8:44:6] Inserted# false Subscription# {SessionId# [8:44:6] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-07T17:23:48.045701Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 6 Cookie# 10249109731048581145 SessionId# [8:44:6] Binding# {6.6/10249109731048581145@[8:44:6]} Record# {RootNodeId: 2 } 2026-01-07T17:23:48.045816Z 8 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005553s 2026-01-07T17:23:48.045862Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [2:108:2] Cookie# 11151196287996289475 2026-01-07T17:23:48.045906Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.046053Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 11151196287996289475 SessionId# [2:108:2] Binding# {1.6/8806333080519377903@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: 
"\3403\207\365\032>> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 15496812985591097010 2026-01-07T17:23:48.037083Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 16712719773227337726 2026-01-07T17:23:48.037183Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 9 2026-01-07T17:23:48.051082Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 16712719773227337726 SessionId# [1:130:1] Binding# {2.2/16712719773227337726@[1:130:1]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.051207Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {2.2/16712719773227337726@[1:130:1]} Reason# binding cycle 2026-01-07T17:23:48.051294Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {2.2/16712719773227337726@[1:130:1]} 2026-01-07T17:23:48.051607Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 2 Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} 2026-01-07T17:23:48.051741Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 9 2026-01-07T17:23:48.051910Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 6 Binding# {6.0/16712719773227337727@[0:0:0]} SessionId# [0:0:0] 2026-01-07T17:23:48.051995Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.014940s 2026-01-07T17:23:48.052085Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [6:118:2] Cookie# 5206374830983766065 2026-01-07T17:23:48.052121Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [6:118:2] Inserted# false Subscription# {SessionId# [6:118:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.052212Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 5206374830983766065 SessionId# [6:118:2] Binding# {2.2/5206374830983766065@[6:118:2]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.052258Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [4:241:20] SessionId# [1:136:3] Cookie# 10074155687460166330 2026-01-07T17:23:48.052289Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [1:136:3] Inserted# false Subscription# {SessionId# [1:136:3] SubscriptionCookie# 0} NextSubscribeCookie# 10 2026-01-07T17:23:48.053077Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 4 Cookie# 10074155687460166330 SessionId# [1:136:3] Binding# {6.0/16712719773227337727@[0:0:0]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> 2026-01-07T17:23:48.056009Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-01-07T17:23:48.056045Z 2 
00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.056097Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.056135Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-01-07T17:23:48.056167Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.056195Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.056235Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.056264Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-01-07T17:23:48.056319Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} 2026-01-07T17:23:48.056396Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.056450Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 1 Binding# {1.0/16187663750402566331@[0:0:0]} SessionId# [0:0:0] 2026-01-07T17:23:48.056496Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 5669574186246101354 2026-01-07T17:23:48.056531Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.056633Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 5669574186246101354 SessionId# [7:146:1] Binding# {1.2/5669574186246101354@[7:146:1]} Record# {RootNode ... 
8.146006Z 1 00h00m00.797468s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.146031Z 1 00h00m00.797468s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.146057Z 1 00h00m00.797468s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.146088Z 1 00h00m00.797468s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146120Z 1 00h00m00.797468s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146149Z 1 00h00m00.797468s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146218Z 1 00h00m00.797468s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.146259Z 1 00h00m00.797468s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.147212Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.147276Z 1 00h00m01.635087s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.147324Z 1 00h00m01.635087s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.147354Z 1 00h00m01.635087s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.147380Z 1 00h00m01.635087s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.147419Z 1 00h00m01.635087s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.147454Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.147484Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.147511Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.147539Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.147569Z 1 00h00m01.635087s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149244Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 
72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.149317Z 1 00h00m03.377334s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.149348Z 1 00h00m03.377334s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.149394Z 1 00h00m03.377334s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.149435Z 1 00h00m03.377334s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.149481Z 1 00h00m03.377334s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.149512Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149544Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149572Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149616Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.149648Z 1 00h00m03.377334s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.152794Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.152880Z 1 00h00m07.175432s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.152917Z 1 00h00m07.175432s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.152966Z 1 00h00m07.175432s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.152993Z 1 00h00m07.175432s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.153029Z 1 00h00m07.175432s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.153085Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.153127Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.153161Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.153215Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 
ClusterStateGuid: 0} 2026-01-07T17:23:48.153254Z 1 00h00m07.175432s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.156518Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 2026-01-07T17:23:48.156618Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 1 2026-01-07T17:23:48.161083Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.161190Z 1 00h00m15.455285s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.161242Z 1 00h00m15.455285s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.161270Z 1 00h00m15.455285s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.161301Z 1 00h00m15.455285s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.161332Z 1 00h00m15.455285s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.161371Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.161423Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.161451Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.161490Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.161530Z 1 00h00m15.455285s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.165884Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 2026-01-07T17:23:48.174141Z 1 00h00m30.177526s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 2026-01-07T17:23:48.177093Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.177213Z 1 00h00m34.002155s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.177260Z 1 00h00m34.002155s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.177291Z 1 00h00m34.002155s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2026-01-07T17:23:48.177318Z 1 00h00m34.002155s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.177346Z 1 00h00m34.002155s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.177454Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.177506Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.177536Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.177567Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.177594Z 1 00h00m34.002155s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] Test command err: RandomSeed# 3964342574453835436 2026-01-07T17:23:48.045020Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:249:20] SessionId# [7:98:3] Cookie# 10854828482992990372 2026-01-07T17:23:48.045073Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.049658Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 10854828482992990372 SessionId# [7:98:3] Binding# {6.3/1866277333031258038@[7:41:6]} Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } } RootNodeId# 3 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.049710Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.049741Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.6:19001/6 2026-01-07T17:23:48.049772Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.049796Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.049820Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} 
DeleteBound RefererNodeId# 3 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.049835Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.049871Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.2:19001/2 2026-01-07T17:23:48.049898Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.5:19001/5 2026-01-07T17:23:48.049961Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [5:115:2] Cookie# 9974982183338420585 2026-01-07T17:23:48.049995Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-07T17:23:48.050042Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 9974982183338420585 SessionId# [5:115:2] Binding# {2.3/9974982183338420585@[5:115:2]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.050081Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 9855704086768918940 2026-01-07T17:23:48.050100Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-07T17:23:48.050194Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 9855704086768918940 SessionId# [1:130:1] Binding# {3.0/18404508980329589656@[1:133:2]} Record# {BoundNodes { NodeId { Host: "127.0.0.4" Port: 19001 NodeId: 4 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 17343310766072774256 2026-01-07T17:23:48.055884Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.055947Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.055973Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056001Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056026Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056050Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056090Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056117Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056142Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.056166Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.059063Z 1 
00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:340:56] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:23:48.059339Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.059415Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [2:131:1] Cookie# 14404738577909492489 2026-01-07T17:23:48.059453Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-07T17:23:48.067869Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 14404738577909492489 SessionId# [2:131:1] Binding# Record# {CacheUpdate { } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.067935Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-07T17:23:48.068017Z 2 00h00m00.002048s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008602s 2026-01-07T17:23:48.068068Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [9:152:1] Cookie# 13874279368127012981 2026-01-07T17:23:48.068109Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.068203Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 13874279368127012981 SessionId# [9:152:1] Binding# {1.2/13874279368127012981@[9:152:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.068253Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [6:143:1] Cookie# 3009700403889814385 2026-01-07T17:23:48.068284Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.068329Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 3009700403889814385 SessionId# [6:143:1] Binding# {1.2/3009700403889814385@[6:143:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.068379Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.068457Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.068584Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.068620Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.068643Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936131 Cookie: 2} 2026-01-07T17:23:48.068665Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.068702Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.068729Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.068754Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.068779Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.068806Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.068830Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.068895Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.068968Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069011Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069051Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069084Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069135Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069157Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069179Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069200Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069230Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.069282Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 14404738577909492489 2026-01-07T17:23:48.069319Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} 
NextSubscribeCookie# 6 2026-01-07T17:23:48.069379Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 14404738577909492489 SessionId# [1:130:1] Binding# {2.2/14404738577909492489@[1:130:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.069432Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [3:109:2] Cookie# 5778429684395833697 2026-01-07T17:23:48.069459Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-07T17:23:48.069503Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 5778429684395833697 SessionId# [3:109:2] Binding# {2.2/5778429684395833697@[3:109:2]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.069551Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [4:112:2] Cookie# 16820742617607835335 2026-01-07T17:23:48.069578Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-07T17:23:48.069621Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 16820742617607835335 SessionId# [4:112:2] Binding# {2.2/16820742617607835335@[4:112:2]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.069676Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [5:115:2] Cookie# 12306075919115652568 2026-01-07T17:23:48.069710Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-07T17:23:48.069760Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 12306075919115652568 SessionId# [5:115:2] Binding# {2.2/12306075919115652568@[5:115:2]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-01-07T17:23:48.069850Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.069956Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}, {[1:3298559226995:0] : 157}}}} 2026-01-07T17:23:48.070104Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyReq ... 
E DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.138725Z 1 00h00m08.393466s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.138754Z 1 00h00m08.393466s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.138839Z 1 00h00m08.393466s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.138868Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139054Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139079Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139101Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139181Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139274Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139296Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139329Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139352Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139383Z 1 00h00m08.393466s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.139454Z 1 00h00m08.393466s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.139508Z 1 00h00m08.393466s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}, {[1:3298559226995:0] : 157}}}} 
2026-01-07T17:23:48.139705Z 1 00h00m08.393466s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2026-01-07T17:23:48.139903Z 1 00h00m08.393466s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037936131 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.143107Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-01-07T17:23:48.143270Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-01-07T17:23:48.143385Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-01-07T17:23:48.143453Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.143523Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.143549Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.143576Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.143627Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.143671Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.143716Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.143738Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.143776Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.143814Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.143892Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.143958Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 
2026-01-07T17:23:48.143988Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144042Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144091Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144136Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144165Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144210Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144241Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144286Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144368Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.144425Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.144490Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, 
{[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.144564Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2026-01-07T17:23:48.144626Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2026-01-07T17:23:48.144697Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] Test command err: RandomSeed# 828660728480735258 2026-01-07T17:23:48.048409Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [1:139:4] Cookie# 4351867188870959373 2026-01-07T17:23:48.048495Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [1:139:4] Inserted# false Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:48.057149Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 4351867188870959373 SessionId# [1:139:4] Binding# {5.0/4351867188870959373@[1:139:4]} Record# {Rejected: true } 2026-01-07T17:23:48.057209Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {5.0/4351867188870959373@[1:139:4]} Reason# binding rejected by peer 2026-01-07T17:23:48.057251Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {5.0/4351867188870959373@[1:139:4]} 2026-01-07T17:23:48.057306Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 5 Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} 2026-01-07T17:23:48.057344Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008996s 2026-01-07T17:23:48.057398Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [8:269:20] SessionId# [3:100:7] Cookie# 3786238103772950632 2026-01-07T17:23:48.057491Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 8 Cookie# 3786238103772950632 
SessionId# [3:100:7] Binding# {2.1/12435610204758347053@[3:109:2]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.8" Port: 19001 NodeId: 8 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.057928Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:48.057951Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.6:19001/6 2026-01-07T17:23:48.057988Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.7:19001/7 2026-01-07T17:23:48.058006Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.9:19001/9 2026-01-07T17:23:48.058022Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.1:19001/1 2026-01-07T17:23:48.058060Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [5:71:4] Cookie# 7844622119446214199 2026-01-07T17:23:48.058079Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.058113Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 7844622119446214199 SessionId# [5:71:4] Binding# {4.8/7844622119446214199@[5:71:4]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.058147Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 3086127525401418716 2026-01-07T17:23:48.058168Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:48.058208Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 3086127525401418716 SessionId# [1:130:1] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-01-07T17:23:48.058224Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:48.058240Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.8:19001/8 2026-01-07T17:23:48.058312Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-07T17:23:48.058355Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 8 
Binding# {8.0/4351867188870959374@[0:0:0]} SessionId# [0:0:0] 2026-01-07T17:23:48.058500Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [3:100:7] SessionId# [0:0:0] Cookie# 2 2026-01-07T17:23:48.058534Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 8 SessionId# [3:100:7] Cookie# 2 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:48.058636Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:148:7] SessionId# [0:0:0] Cookie# 8 2026-01-07T17:23:48.058653Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 8 SessionId# [1:148:7] Cookie# 8 CookieInFlight# true SubscriptionExists# true 2026-01-07T17:23:48.058685Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {8.0/4351867188870959374@[0:0:0]} 2026-01-07T17:23:48.058739Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [8:101:3] Cookie# 3786238103772950632 2026-01-07T17:23:48.058762Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-07T17:23:48.058796Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 3786238103772950632 SessionId# [8:101:3] Binding# {3.0/3786238103772950632@[8:101:3]} Record# {RootNodeId: 1 } 2026-01-07T17:23:48.058822Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [2:108:2] Cookie# 12435610204758347053 2026-01-07T17:23:48.058840Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-07T17:23:48.058897Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 12435610204758347053 SessionId# [2:108:2] Binding# {1.1/3086127525401418716@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.8" Port: 19001 NodeId: 8 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2026-01-07T17:23:48.128420Z 1 00h00m30.180205s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.128437Z 1 00h00m30.180205s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.128453Z 1 00h00m30.180205s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.128476Z 1 00h00m30.180205s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.128504Z 1 00h00m30.180205s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 0 2026-01-07T17:23:48.128533Z 1 00h00m30.180205s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 
ClusterStateGeneration: 0 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.128565Z 1 00h00m30.180205s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-07T17:23:48.128612Z 1 00h00m30.180205s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2026-01-07T17:23:48.133042Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.133104Z 1 00h00m39.050018s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:23:48.133140Z 1 00h00m39.050018s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:23:48.133158Z 1 00h00m39.050018s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:23:48.133183Z 1 00h00m39.050018s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-07T17:23:48.133222Z 1 00h00m39.050018s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-07T17:23:48.133244Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.133267Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.133282Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.133297Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.133320Z 1 00h00m39.050018s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:23:48.135021Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.135114Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.135221Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.135243Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.135320Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.135381Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.135409Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 
72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.135466Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.135516Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144633Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:23:48.144713Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:23:48.144749Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:23:48.144802Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:23:48.144835Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-07T17:23:48.144875Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-07T17:23:48.144951Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.144995Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-07T17:23:48.145016Z 1 00h00m50.300000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-01-07T17:23:48.145084Z 1 00h00m50.300000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> BSCStopPDisk::PDiskStop |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| 
[LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 11596658783351934075 2026-01-07T17:23:48.275506Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275601Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275633Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275658Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275692Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275717Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275740Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276532Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276614Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276654Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276706Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276751Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276791Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276834Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276899Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276980Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277016Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277091Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277126Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277151Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277171Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.278499Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278557Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278591Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278636Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278666Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278697Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.278726Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 16452955699059813036 2026-01-07T17:23:48.274981Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275060Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275087Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275120Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275152Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275178Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275199Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275220Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.275983Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276049Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276081Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276110Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276136Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276163Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276200Z 7 
00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276234Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.276285Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276347Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276376Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276412Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276431Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276449Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276467Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.276488Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-07T17:23:48.277727Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277775Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277804Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277830Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277855Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277890Z 3 
00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277923Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.277961Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-07T17:23:48.411332Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2026-01-07T17:23:48.411407Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2026-01-07T17:23:48.411452Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2026-01-07T17:23:48.411489Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2026-01-07T17:23:48.411515Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2026-01-07T17:23:48.411541Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2026-01-07T17:23:48.411567Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |81.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} >> BSCStopPDisk::PDiskStop [GOOD] |81.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 6754985559861618513 2026-01-07T17:23:38.601409Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.102673s 2026-01-07T17:23:38.602040Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.103663s |81.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 16171823727607611215 |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |81.4%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> CheckIntegrityBlock42::PlacementOk >> CheckIntegrityBlock42::PlacementBlobIsLost >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts |81.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} >> CheckIntegrityMirror3of4::PlacementOk >> CheckIntegrityMirror3dc::PlacementOk >> CheckIntegrityMirror3dc::PlacementBlobIsLost >> CheckIntegrityBlock42::DataOk >> CheckIntegrityBlock42::PlacementWrongDisks >> CheckIntegrityMirror3dc::PlacementOkWithErrors |81.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> CheckIntegrityBlock42::PlacementOkWithErrors >> BlobDepot::LoadPutAndRead [GOOD] >> BlobDepot::DecommitPutAndRead >> CheckIntegrityBlock42::PlacementBlobIsLost [GOOD] >> CheckIntegrityBlock42::PlacementOk [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff >> CheckIntegrityBlock42::PlacementOkHandoff >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken >> CheckIntegrityMirror3of4::PlacementOk [GOOD] >> CheckIntegrityMirror3dc::PlacementOk [GOOD] >> CheckIntegrityMirror3dc::PlacementOkHandoff >> CheckIntegrityMirror3of4::PlacementMissingParts >> CheckIntegrityBlock42::PlacementWrongDisks [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy >> CheckIntegrityBlock42::DataOk [GOOD] >> CheckIntegrityBlock42::DataOkAdditionalEqualParts >> CheckIntegrityMirror3dc::PlacementOkWithErrors [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks >> CheckIntegrityMirror3dc::PlacementBlobIsLost [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated >> CheckIntegrityBlock42::PlacementOkWithErrors [GOOD] >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks >> CheckIntegrityBlock42::PlacementAllOnHandoff [GOOD] >> CheckIntegrityBlock42::PlacementDisintegrated >> CheckIntegrityBlock42::PlacementOkHandoff [GOOD] >> CheckIntegrityBlock42::PlacementMissingParts >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityBlock42::PlacementStatusUnknown >> CheckIntegrityMirror3dc::DataErrorOneCopy [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies >> CheckIntegrityBlock42::DataOkAdditionalEqualParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken >> CheckIntegrityMirror3of4::PlacementMissingParts [GOOD] >> CheckIntegrityMirror3of4::PlacementDisintegrated >> CheckIntegrityMirror3dc::PlacementOkHandoff [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts >> CheckIntegrityMirror3dc::PlacementDisintegrated [GOOD] >> CheckIntegrityMirror3dc::DataOk >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost >> CheckIntegrityBlock42::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken [GOOD] >> CheckIntegrityBlock42::DataOkErasureFiveParts >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] Test command err: RandomSeed# 4055724542496937723 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> CheckIntegrityMirror3dc::DataOk [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] Test command err: RandomSeed# 6799573588438022990 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] Test command err: RandomSeed# 3375835191192025491 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] Test command err: RandomSeed# 17229013112998983310 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 11092262644255914309 2026-01-07T17:23:48.872760Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:48.874072Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11345744877683350551] 2026-01-07T17:23:48.893015Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] Test command err: RandomSeed# 17632630012226367069 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** Group is disintegrated or has network problems ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] Test command err: RandomSeed# 13432024582229458850 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: part 2: part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataOk [GOOD] Test command err: RandomSeed# 17554644046508288978 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] Test command err: RandomSeed# 15237649307190384524 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 ], ver1 disks [ 2 ] ERROR: There are unequal parts *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ], ver1 disks [ 3 4 5 ] ERROR: There are unequal parts |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 568709945089198959 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO 
[82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } 
CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 
disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 
2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> 
OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 11262797692095106012 2026-01-07T17:23:47.217949Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.130481s 2026-01-07T17:23:47.218313Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.131235s |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BlobDepot::DecommitPutAndRead [GOOD] >> BlobDepot::DecommitVerifiedRandom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 8990820239821604087 |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] Test command err: RandomSeed# 308509110268941047 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 0 ] part 2: ver0 disks [ 7 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |81.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> BlobDepot::DecommitVerifiedRandom [GOOD] >> BlobDepot::CheckIntegrity >> BlobDepot::CheckIntegrity [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::CheckIntegrity [GOOD] Test command err: Mersenne random seed 1433268113 RandomSeed# 1863486933695335507 Mersenne random seed 3785176515 Mersenne random seed 1569637826 Mersenne random seed 2165272753 Mersenne random seed 973484565 2026-01-07T17:23:34.430452Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430595Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430645Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430692Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430743Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430791Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430857Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.430904Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.431182Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [2a6e4c35f37bdafc] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} 
GroupId# 2181038080 Marker# BPP12 2026-01-07T17:23:34.432282Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432427Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432476Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432524Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432573Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432629Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432675Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.432723Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448005Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448230Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448287Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448353Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448404Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448455Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448508Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448557Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; 
id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:23:34.448770Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [8b745c3af26e5828] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 4173875366 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2026-01-07T17:23:35.931336Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.932687Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.932760Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.932823Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.933074Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.933309Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.933375Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-07T17:23:35.933623Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2026-01-07T17:23:36.047906Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.048704Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.048781Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.049187Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 
hard] barrier# 1:1 2026-01-07T17:23:36.049422Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.049707Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.049777Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-07T17:23:36.050024Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 797291121 Read over the barrier, blob id# [101:2:1:2:12302034:150:0] Read over the barrier, blob id# [101:2:1:2:12302034:150:0] Read over the barrier, blob id# [101:2:1:2:12302034:150:0] Read over the barrier, blob id# [101:2:1:2:12302034:150:0] Read over the barrier, blob id# [101:2:1:2:123 ... 
eGCCmd: decreasing barrier: existing key# [15 1 12 0 hard] barrier# 1:0 new key# [15 1 16 2 hard] barrier# 0:2 Read over the barrier, blob id# [17:2:1:1:9310987:895:0] Read over the barrier, blob id# [17:3:3:1:4493983:283:0] Read over the barrier, blob id# [17:3:6:1:2235932:967:0] Read over the barrier, blob id# [17:2:1:2:14067723:964:0] Read over the barrier, blob id# [17:3:6:1:1141396:985:0] 2026-01-07T17:23:56.401922Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402370Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402479Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402589Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402684Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402783Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402880Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 2026-01-07T17:23:56.402988Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 8 0 soft] barrier# 0:1 new key# [17 1 15 3 soft] barrier# 0:0 Read over the barrier, blob id# [17:3:3:1:4493983:283:0] Read over the barrier, blob id# [17:3:2:0:11887409:566:0] TEvRange returned collected blob with id# [15:2:5:2:3544791:667:0] TEvRange returned collected blob with id# [15:2:5:2:14397774:170:0] Read over the barrier, blob id# [16:2:6:0:14322140:89:0] Read over the barrier, blob id# [16:2:6:0:13506237:322:0] Read over the barrier, blob id# [17:3:1:0:10196989:109:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [17:3:1:0:10196989:109:0] Read over the barrier, blob id# [17:3:6:0:13199642:372:0] Read over the barrier, blob id# [17:3:6:0:13199642:372:0] Read over the barrier, blob id# [17:4:13:0:6754004:477:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [17:3:1:0:10196989:109:0] Read over the barrier, blob id# [17:2:1:1:7602740:996:0] Read over the barrier, blob id# [17:4:13:0:6754004:477:0] TEvRange returned collected blob with id# [16:2:6:0:14322140:89:0] Read over the barrier, blob id# [17:3:4:1:1587:763:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# 
[17:3:1:0:10196989:109:0] Read over the barrier, blob id# [17:3:4:1:1587:763:0] Read over the barrier, blob id# [17:2:1:1:7602740:996:0] Read over the barrier, blob id# [17:2:1:1:9310987:895:0] Read over the barrier, blob id# [17:2:1:1:9310987:895:0] Read over the barrier, blob id# [17:3:3:1:4493983:283:0] Read over the barrier, blob id# [17:2:1:1:9310987:895:0] Read over the barrier, blob id# [17:3:6:0:13199642:372:0] 2026-01-07T17:23:56.815862Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.816405Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.816534Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.816689Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.816821Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.816965Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.817093Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 2026-01-07T17:23:56.817202Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 2 15 0 hard] barrier# 3:0 new key# [17 2 22 1 hard] barrier# 2:3 Read over the barrier, blob id# [15:2:5:2:3544791:667:0] Read over the barrier, blob id# [15:2:5:2:3544791:667:0] Read over the barrier, blob id# [15:2:5:1:4497061:201:0] Read over the barrier, blob id# [15:2:5:1:4497061:201:0] Read over the barrier, blob id# [15:2:5:2:14397774:170:0] Read over the barrier, blob id# [15:2:5:1:7628771:297:0] Read over the barrier, blob id# [17:3:2:0:11887409:566:0] Read over the barrier, blob id# [17:3:2:0:11887409:566:0] Read over the barrier, blob id# [17:4:13:0:6754004:477:0] Read over the barrier, blob id# [17:3:6:1:1141396:985:0] 2026-01-07T17:23:57.054891Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.055457Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.055771Z 2 00h00m25.013072s :BS_HULLRECS CRIT: 
PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.056074Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.056389Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.056496Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.056791Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 2026-01-07T17:23:57.057558Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 0 20 0 soft] barrier# 5:2 new key# [17 0 22 4 soft] barrier# 4:3 Read over the barrier, blob id# [17:4:13:0:6754004:477:0] Read over the barrier, blob id# [17:3:2:0:11887409:566:0] Read over the barrier, blob id# [17:3:6:0:13199642:372:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [17:3:1:0:10196989:109:0] Read over the barrier, blob id# [16:2:6:0:13506237:322:0] Read over the barrier, blob id# [16:2:6:0:7569501:108:0] TEvRange returned collected blob with id# [17:3:1:0:10196989:109:0] TEvRange returned collected blob with id# [17:3:2:0:11887409:566:0] TEvRange returned collected blob with id# [17:3:4:0:6072865:121:0] TEvRange returned collected blob with id# [17:3:6:0:13199642:372:0] TEvRange returned collected blob with id# [17:4:13:0:6754004:477:0] TEvRange returned collected blob with id# [17:5:14:0:11525947:600:0] TEvRange returned collected blob with id# [15:4:12:2:15172091:664:0] Read over the barrier, blob id# [17:4:13:0:6754004:477:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [17:5:14:0:11525947:600:0] Read over the barrier, blob id# [17:3:2:0:11887409:566:0] Read over the barrier, blob id# [17:2:1:2:14067723:964:0] Read over the barrier, blob id# [15:2:5:1:4497061:201:0] Read over the barrier, blob id# [15:4:12:2:15172091:664:0] TEvRange returned collected blob with id# [16:2:6:2:4432212:119:0] TEvRange returned collected blob with id# [16:2:6:2:4432212:119:0] Read over the barrier, blob id# [17:4:13:0:6754004:477:0] Read over the barrier, blob id# [17:5:14:0:11525947:600:0] TEvRange returned collected blob with id# [15:4:12:2:15172091:664:0] Read over the barrier, blob id# [17:2:1:1:7602740:996:0] TEvRange returned collected blob with id# [16:2:6:2:4432212:119:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [17:3:4:0:6072865:121:0] Read over the barrier, blob id# [15:3:9:1:9862892:333:0] Read over the barrier, blob id# [15:2:5:2:3544791:667:0] Read over the barrier, blob id# [15:2:5:2:3544791:667:0] Read over the barrier, blob id# [15:3:9:1:2057419:963:0] Read over the barrier, blob id# 
[15:2:5:2:3544791:667:0] TEvRange returned collected blob with id# [17:5:14:0:11525947:600:0] Mersenne random seed 159396833 ErrorReason DataInfo Disks: 0: [82000000:1:0:2:0] 1: [82000000:1:0:3:0] 2: [82000000:1:0:4:0] 3: [82000000:1:0:5:0] 4: [82000000:1:0:6:0] 5: [82000000:1:0:7:0] 6: [82000000:1:0:0:0] 7: [82000000:1:0:1:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ErrorReason DataInfo [72075186224037888:1:1:3:1:100:0] Disks: 0: [82000000:1:0:7:0] 1: [82000000:1:0:0:0] 2: [82000000:1:0:1:0] 3: [82000000:1:0:2:0] 4: [82000000:1:0:3:0] 5: [82000000:1:0:4:0] 6: [82000000:1:0:5:0] 7: [82000000:1:0:6:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TM] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |81.4%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 7836403240505315179 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge discover 
maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 7 
*** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 ... iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge discover 
maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 7 
*** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |81.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |81.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/gateway/ut/gtest >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] |81.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.5%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest |81.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 9636011640957698042 2026-01-07T17:23:38.740785Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.151415s 2026-01-07T17:23:38.741444Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.152666s |81.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] >> TestFilterSet::FilterGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 3095014741141251513 |81.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] |81.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |81.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut >> TestFilterSet::FilterGroup [GOOD] >> TestFilterSet::DuplicationValidation >> TDescriberTests::TopicExists >> Discovery::DelayedNameserviceResponse >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TWriteQuoterTests::WaitDeduplicationIdQuota |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TestFilterSet::DuplicationValidation [GOOD] >> TestFilterSet::CompilationValidation >> HttpRouter::Basic [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |81.6%| [TS] {BAZEL_UPLOAD} ydb/core/public_http/ut/unittest |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.6%| [TS] {RESULT} ydb/core/public_http/ut/unittest >> TMLPChangerTests::TopicNotExists |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.6%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |81.6%| [LD] {RESULT} 
$(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor >> TestFilterSet::CompilationValidation [GOOD] >> TestFilterSet::Watermark >> Coordinator::ReadStepSubscribe |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> DataShardReassign::AutoReassignOnYellowFlag >> TestFilterSet::Watermark [GOOD] >> TestFilterSet::WatermarkWhere >> KqpExecuter::TestSuddenAbortAfterReady >> Discovery::DelayedNameserviceResponse [GOOD] >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 >> Splitter::Simple >> TDescriberTests::TopicExists [GOOD] >> TDescriberTests::TopicNotExists >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] >> TestFilterSet::WatermarkWhere [GOOD] >> TestFilterSet::WatermarkWhereFalse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/quota/ut/unittest >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] Test command err: processed_blobs=41800 quoted_time=9.980000s 2026-01-07T17:24:43.037128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671893080668237:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:43.037566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:43.081385Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:24:43.204417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:43.207005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:43.207076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:43.223623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:43.296314Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:43.297152Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592671893080668207:2081] 1767806683035608 != 1767806683035611 2026-01-07T17:24:43.361938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0019e4/r3tmp/yandexDzA535.tmp 2026-01-07T17:24:43.361962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0019e4/r3tmp/yandexDzA535.tmp 2026-01-07T17:24:43.362223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0019e4/r3tmp/yandexDzA535.tmp 2026-01-07T17:24:43.362418Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:43.414770Z INFO: TTestServer started on Port 29056 GrpcPort 27285 2026-01-07T17:24:43.459147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:27285 2026-01-07T17:24:43.645840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:43.702185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:44.044271Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:45.215184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671901670603831:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:45.215289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671901670603840:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:45.215338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:45.215787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671901670603847:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:45.215834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:45.218941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:24:45.227492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592671901670603846:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:24:45.471308Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592671901670603912:2640] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:24:45.493805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:45.522307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:45.598354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:45.605882Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592671901670603920:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:24:45.607679Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjcwYzU0NjEtMTcyNGQyZjEtYzk2Mjc4ZmUtNTg0NmYyNDI=, ActorId: [1:7592671901670603829:2329], ActorState: ExecuteState, LegacyTraceId: 01kecqtm0xf4h38gq2x3pkccyy, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:24:45.627487Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592671901670604203:2817] 2026-01-07T17:24:48.037524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671893080668237:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:48.037584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-01-07T17:24:51.117978Z :TEST_CASE_NAME INFO: TTopicSdkTestSetup started |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/quota/ut/unittest >> Splitter::Simple [GOOD] >> Splitter::Small >> Splitter::Small [GOOD] >> Splitter::Minimal >> Splitter::Minimal [GOOD] >> Splitter::Trivial >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> TestFilterSet::WatermarkWhereFalse [GOOD] >> TestFormatHandler::ManyJsonClients >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] Test command err: RandomSeed# 14486263326136155424 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 2 *** 
performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** 
reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 3 *** 
performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 2 *** performing bridge rang ... 
sk2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 
mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor [GOOD] |81.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} >> TMLPChangerTests::TopicNotExists [GOOD] >> TMLPChangerTests::ConsumerNotExists >> ProtoTests::CreateQueueFiller [GOOD] >> ProtoTests::UpdateQueueFiller [GOOD] >> ProtoTests::DeleteQueueFiller [GOOD] >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> ProtoTests::DeleteQueueFiller [GOOD] Test command err: 2026-01-07T17:24:46.279495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671905614142290:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:46.279569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:46.707817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:46.730275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:46.730379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:46.748394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:46.808447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:46.813076Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7592671905614142262:2081] 1767806686278120 != 1767806686278123 2026-01-07T17:24:46.937146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:24:46.937710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:24:46.937720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:24:46.937726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:24:46.937867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:47.284912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:47.358605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:51.279820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671905614142290:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:51.279873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:24:55.556182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671944268848917:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:55.556206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671944268848909:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:55.556335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:55.556896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671944268848924:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:55.556979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:55.561465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:24:55.576038Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592671944268848923:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:24:55.586918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.775217Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592671944268849027:2629] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767806696996,8594418834399873448,'queue1','CreateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767806697843,11550194842081988795,'queue1','UpdateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767806698294,8608450020453051350,'queue1','DeleteMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== 2026-01-07T17:25:01.622292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:25:01.622315Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:46.770643Z: component=schemeshard, tx_id=281474976715657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.771561Z: component=schemeshard, tx_id=281474976715658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.771764Z: component=schemeshard, tx_id=281474976715659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.771898Z: component=schemeshard, tx_id=281474976715660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772008Z: component=schemeshard, tx_id=281474976715661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, 
operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772150Z: component=schemeshard, tx_id=281474976715662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772269Z: component=schemeshard, tx_id=281474976715663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772386Z: component=schemeshard, tx_id=281474976715664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772518Z: component=schemeshard, tx_id=281474976715665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772643Z: component=schemeshard, tx_id=281474976715666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772781Z: component=schemeshard, tx_id=281474976715667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.772911Z: component=schemeshard, tx_id=281474976715668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777326Z: component=schemeshard, tx_id=281474976715669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777445Z: component=schemeshard, tx_id=281474976715670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777540Z: component=schemeshard, tx_id=281474976715671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777636Z: component=schemeshard, tx_id=281474976715672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777754Z: component=schemeshard, tx_id=281474976715673, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777845Z: component=schemeshard, 
tx_id=281474976715674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.777949Z: component=schemeshard, tx_id=281474976715675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778031Z: component=schemeshard, tx_id=281474976715676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778136Z: component=schemeshard, tx_id=281474976715677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778246Z: component=schemeshard, tx_id=281474976715678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778336Z: component=schemeshard, tx_id=281474976715679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778425Z: component=schemeshard, tx_id=281474976715680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778516Z: component=schemeshard, tx_id=281474976715681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778616Z: component=schemeshard, tx_id=281474976715682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778696Z: component=schemeshard, tx_id=281474976715683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778795Z: component=schemeshard, tx_id=281474976715684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.778907Z: component=schemeshard, tx_id=281474976715685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779010Z: component=schemeshard, tx_id=281474976715686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, 
paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779121Z: component=schemeshard, tx_id=281474976715687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779213Z: component=schemeshard, tx_id=281474976715688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779305Z: component=schemeshard, tx_id=281474976715689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779404Z: component=schemeshard, tx_id=281474976715690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779493Z: component=schemeshard, tx_id=281474976715691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:46.779594Z: component=schemeshard, tx_id=281474976715692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:47.359290Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:47.365588Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:48.611654Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS/Root/SQS/CreateCloudEventProcessor], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:55.562501Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[.metadata/workload_manager/pools/default], status=SUCCESS, detailed_status=StatusAccepted, new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-07T17:24:55.587883Z: component=schemeshard, tx_id=281474976710661, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE TABLE, paths=[/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:24:55.774071Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[default], status=SUCCESS, detailed_status=StatusAlreadyExists, reason=Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: 
path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges), new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-07T17:24:59.415998Z: component=ymq, id=8594418834399873448$CreateMessageQueue$2026-01-07T17:24:59.409286Z, operation=CreateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.create, created_at=2026-01-07T17:24:56.996000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8594418834399873448$CreateMessageQueue$2026-01-07T17:24:56.996000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-07T17:24:59.417115Z: component=ymq, id=11550194842081988795$UpdateMessageQueue$2026-01-07T17:24:59.415544Z, operation=UpdateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.setAttributes, created_at=2026-01-07T17:24:57.843000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=11550194842081988795$UpdateMessageQueue$2026-01-07T17:24:57.843000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-07T17:24:59.417973Z: component=ymq, id=8608450020453051350$DeleteMessageQueue$2026-01-07T17:24:59.415586Z, operation=DeleteMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.delete, created_at=2026-01-07T17:24:58.294000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8608450020453051350$DeleteMessageQueue$2026-01-07T17:24:58.294000Z, queue=queue1, labels={"k1" : "v1"} |81.8%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> TDescriberTests::TopicNotExists [GOOD] >> TDescriberTests::TopicNotTopic >> TestFormatHandler::ManyJsonClients [GOOD] >> TestFormatHandler::ManyRawClients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/discovery/unittest >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] Test command err: Trying to start YDB, gRPC: 26926, MsgBus: 17393 2026-01-07T17:24:42.980877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671886751915269:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:42.981540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:43.196744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:43.196848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:43.201938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:43.245155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:43.275974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-07T17:24:43.276412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592671886751915241:2081] 1767806682979697 != 1767806682979700 2026-01-07T17:24:43.330682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:24:43.330702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:24:43.330724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:24:43.330799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:43.474589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:24:43.742474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:43.803791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:43.917411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:44.013426Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:44.049101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:44.102493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.040653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671903931786305:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.040763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.041106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671903931786315:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.041160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.291873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.318791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.360151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.415920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.456340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.533478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.683580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.738191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:46.873210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671903931787184:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.873275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.873425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671903931787189:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.873611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671903931787191:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.873653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:46.880157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:24:46.900733Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592671903931787192:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:24:46.968301Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592671903931787244:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:24:47.981189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671886751915269:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:47.981252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15906, MsgBus: 14325 2026-01-07T17:24:52.916327Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592671929653621840:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:52.917470Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:52.985130Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:24:53.111264Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:53.368848Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:53.429704Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:53.429778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:53.434944Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:53.439561Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:53.449643Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592671929653621814:2081] 1767806692905890 != 1767806692905893 2026-01-07T17:24:53.705045Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:24:53.782007Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:24:53.782028Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:24:53.782286Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:24:53.782354Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:53.982552Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:55.526612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:55.545721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.601100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.838547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.921999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:57.914993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592671929653621840:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:57.915053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:24:59.625290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592671959718394781:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:59.626146Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:59.627349Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592671959718394791:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:59.627811Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:59.783919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:59.881148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:00.008544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:00.428768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:00.599374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:00.887960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:00.985051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:01.498751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:02.085269Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592671972603297590:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:02.085335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:02.085549Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592671972603297595:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:02.085581Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592671972603297596:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:02.086243Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:02.093986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:25:02.133882Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592671972603297599:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:25:02.209233Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592671972603297650:3798] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/discovery/unittest >> TestFormatHandler::ManyRawClients [GOOD] >> TestFormatHandler::ClientValidation >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.7%| [TM] {RESULT} ydb/core/persqueue/pqtablet/quota/ut/unittest |81.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [TS] {RESULT} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |81.8%| [TM] {RESULT} ydb/core/kqp/ut/discovery/unittest |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> TestFormatHandler::ClientValidation [GOOD] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut >> TestFormatHandler::ClientError >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> SequenceProxy::Basics |81.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] 
Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:25:04.934594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:05.666043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:05.797155Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:05.801192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:05.801535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:10.896419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:10.897295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:11.752901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767806691669038 != 1767806691669042 2026-01-07T17:25:11.853474Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:11.917760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:12.155838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:25:13.203409Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2026-01-07T17:25:13.204279Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.223379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:13.223666Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{189, redo 997b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2026-01-07T17:25:13.223746Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.224679Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:1:24576:527:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.224808Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.224958Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} commited cookie 1 for step 190 2026-01-07T17:25:13.225298Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-07T17:25:13.225352Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.225550Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{190, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-07T17:25:13.225603Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.225899Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.225957Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.226034Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} commited cookie 1 for step 191 2026-01-07T17:25:13.226186Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-07T17:25:13.226227Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.226370Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{191, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-07T17:25:13.226426Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.226652Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.226719Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.226784Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} commited cookie 1 for step 192 2026-01-07T17:25:13.226890Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-07T17:25:13.226920Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.227035Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{192, redo 121b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:25:13.227077Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.227228Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:1:24576:97:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.227274Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.227335Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} commited cookie 1 for step 193 2026-01-07T17:25:13.227869Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2026-01-07T17:25:13.227940Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.228023Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{193, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:25:13.228079Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.248856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:25:13.381251Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:25:13.381345Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.381566Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{8, redo 264b alter 0b annex 0, ~{ 0, 4, 2 } -{ }, 0 gb} 2026-01-07T17:25:13.381625Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.382059Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:1:24576:165:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.382111Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.382197Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:10} commited cookie 1 for step 9 2026-01-07T17:25:13.382733Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep 2026-01-07T17:25:13.382783Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:13.383294Z 
node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} hope 1 -> done Change{193, redo 819b alter 0b annex 0, ~{ 1, 18, 33, 27, 35, 42, 48, 4 } -{ }, 0 gb} 2026-01-07T17:25:13.383384Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:13.383764Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:194:1:24576:427:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:13.383804Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:1 ... 88150Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:21.988490Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:21.988565Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:21.988679Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2026-01-07T17:25:22.124564Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:25:22.124634Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.124760Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{26, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-07T17:25:22.124798Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.125114Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.125181Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.125256Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2026-01-07T17:25:22.259548Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:25:22.259615Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.259741Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{27, redo 134b alter 0b annex 
0, ~{ 2 } -{ }, 0 gb} 2026-01-07T17:25:22.259791Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.260099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.260181Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.260256Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2026-01-07T17:25:22.272962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:25:22.273011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:22.295961Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2026-01-07T17:25:22.296032Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.296126Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:25:22.296196Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.418689Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:25:22.418756Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.418856Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{28, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-07T17:25:22.418919Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.419232Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.419283Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.419343Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2026-01-07T17:25:22.512704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 
2026-01-07T17:25:22.512762Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T17:25:22.512847Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2026-01-07T17:25:22.512891Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.512963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 16000 last cleanup 0 2026-01-07T17:25:22.513020Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:25:22.513053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-07T17:25:22.513085Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:25:22.513109Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:25:22.513167Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:25:22.513209Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.513347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:25:22.575208Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:25:22.575301Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.575414Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{29, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-07T17:25:22.575464Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.575835Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.575930Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.576011Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2026-01-07T17:25:22.598766Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 
2026-01-07T17:25:22.598839Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.599128Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{13, redo 193b alter 0b annex 0, ~{ 1, 16, 4 } -{ }, 0 gb} 2026-01-07T17:25:22.599187Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.599423Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-07T17:25:22.599499Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:25:22.599647Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{14, redo 329b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 2026-01-07T17:25:22.599706Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:25:22.610512Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037968897:2:11:0:0:265:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:25:22.610644Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} commited cookie 1 for step 11 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2026-01-07T17:25:22.724625Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow current light yellow move channels: [ 0 1 ] 2026-01-07T17:25:22.724696Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow reassign channels: [ 0 1 ] tablet# 72075186224037888 hive# 72057594037968897 --- Captured TEvReassignTablet event |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_reassign/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> DataShardBackgroundCompaction::ShouldCompact >> TestFormatHandler::ClientError [GOOD] >> TestFormatHandler::ClientErrorWithEmptyFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 15581591370097430339 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> TMLPChangerTests::ConsumerNotExists [GOOD] >> TMLPChangerTests::PartitionNotExists >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries |81.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> TestFormatHandler::Watermark >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_1 [GOOD] >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] |81.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/raw_socket/ut/unittest >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] |81.8%| [TS] {BAZEL_UPLOAD} ydb/core/raw_socket/ut/unittest |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest |81.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TS] {RESULT} ydb/core/raw_socket/ut/unittest |81.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] Test command err: Trying to start YDB, gRPC: 26967, MsgBus: 22308 ... waiting for SysViewsRoster update finished 2026-01-07T17:25:06.840398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:08.205572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:08.387456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:08.399980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:08.400641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:11.854796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:11.854911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:12.206688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767806692986521 != 1767806692986525 2026-01-07T17:25:12.229494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:12.342346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:12.679379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:25:15.427180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:25:15.428186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:25:15.428844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:25:15.438927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:25:15.470402Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:25:20.293921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:20.540422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:25:20.540493Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:20.552750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:20.904021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:21.283875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:21.552216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:22.183800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1974:3580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:22.183961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:22.184587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2048:3599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:22.184640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:22.203048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:22.430546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:22.658193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:22.892733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:23.128854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:23.422016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:23.671376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:23.985072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:24.768576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2881:4262], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:24.768667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:24.773299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2885:4266], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:24.773932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:24.774005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2888:4269], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:24.788203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:25:25.061419Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2890:4271], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:25:25.396903Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2951:4313] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Got NActors::TEvents::TEvWakeup [1:301:2344] [0:0:0] Got NActors::TEvents::TEvWakeup [1:303:2346] [0:0:0] Got NActors::TEvents::TEvWakeup [1: ... t NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1259:2990] [1:1126:2957] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1128:2959] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1128:2959] [1:1014:2873] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1282:2996] [1:1128:2959] Got NKikimr::NGRpcService::TGrpcRequestCall> [1:829:2724] [1:8320808721877066593:7169396] Got NKikimr::NGRpcService::TEvGrpcMon::TEvReportPeer [0:6011864930757669447:2188150] [1:8320808721877066593:7169396] Got NActors::IEventHandle [1:3244:4555] [1:829:2724] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:8678280833929343339:121] [1:3244:4555] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:3238:4549] [1:66:2113] Got NKikimr::NKqp::NWorkload::TEvSubscribeOnPoolChanges [1:7742373267896299883:25708] [1:66:2113] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:66:2113] Got NActors::IEventHandle [1:3246:4557] [1:304:2347] Got NKikimr::NKqp::NWorkload::TEvPlaceRequestIntoPool [1:7742373267896299883:25708] [1:3238:4549] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:66:2113] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3246:4557] Got NActors::IEventHandle [1:3247:4558] [1:304:2347] Got NActors::IEventHandle [1:3248:4559] [1:308:2351] Got NActors::IEventHandle [1:3249:4560] [1:3247:4558] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:66:2113] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3249:4560] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3246:4557] [1:3248:4559] Got NActors::IEventHandle [1:3250:4561] [1:308:2351] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:304:2347] [1:3246:4557] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvUpdatePoolSubscription [1:2959:4320] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NWorkload::TEvUpdatePoolInfo [1:66:2113] [1:2959:4320] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3249:4560] [1:3250:4561] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:3247:4558] [1:3249:4560] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:7742373267896299883:25708] [1:3247:4558] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:2959:4320] [1:3247:4558] Got NKikimr::NKqp::NWorkload::TEvContinueRequest [1:3238:4549] [1:2959:4320] Got NKikimr::NKqp::NPrivateEvents::TEvCompileRequest [1:8101253777303040363:6646889] [1:3238:4549] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvPlaceRequestIntoPoolResponse [1:7742373267896299883:25708] [1:2959:4320] Got NActors::IEventHandle [1:3251:4562] [1:300:2343] Got NActors::IEventHandle [1:3253:4564] [1:3251:4562] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3253:4564] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest 
[1:2979:4335] [1:308:2351] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest [1:2979:4335] [1:308:2351] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2981:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2982:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2983:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2984:4335] [1:2981:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2984:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2986:4335] [1:2982:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2986:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2988:4335] [1:2983:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2984:4335] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2988:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2986:4335] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2981:4335] [1:2984:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2988:4335] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2982:4335] [1:2986:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2981:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2983:4335] [1:2988:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2982:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:308:2351] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2983:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2981:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2982:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2983:4335] [1:2979:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2984:4335] [1:2981:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2984:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2986:4335] [1:2982:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2988:4335] [1:2983:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2986:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2988:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2984:4335] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2986:4335] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2988:4335] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2981:4335] [1:2984:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2982:4335] [1:2986:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2983:4335] [1:2988:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2981:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2982:4335] Got 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2979:4335] [1:2983:4335] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:308:2351] [1:2979:4335] Got NActors::IEventHandle [1:3254:4565] [1:308:2351] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3253:4564] [1:3254:4565] Got NActors::IEventHandle [1:3255:4566] [1:3253:4564] Got NKikimr::NStat::TEvStatistics::TEvGetStatistics [1:8534995652929746003:6644585] [1:3255:4566] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:77:2124] Got NActors::IEventHandle [1:3256:4567] [1:308:2351] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:77:2124] [1:3256:4567] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:77:2124] Got NActors::IEventHandle [1:3257:4568] [1:308:2351] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:77:2124] [1:3257:4568] Got NKikimr::NStat::TEvStatistics::TEvGetStatisticsResult [1:3255:4566] [1:77:2124] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3251:4562] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3251:4562] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3251:4562] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:300:2343] [1:3251:4562] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:3238:4549] [1:300:2343] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3238:4549] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3258:4549] [1:68:2115] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3258:4549] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3258:4549] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3258:4549] 2026-01-07T17:25:36.165023Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3258:4549] TxId: 281474976715673. Ctx: { TraceId: 01kecqw2n4c8s2bjbsn829kbpx, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmE3YWQzZmYtYmEwZmYwYTQtOTlmY2MwODMtZTI0YjQ3Mzk=, PoolId: default, IsStreamingQuery: 0}. 
Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3258:4549] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3238:4549] [1:3258:4549] 2026-01-07T17:25:36.166174Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NmE3YWQzZmYtYmEwZmYwYTQtOTlmY2MwODMtZTI0YjQ3Mzk=, ActorId: [1:3238:4549], ActorState: ExecuteState, LegacyTraceId: 01kecqw2n4c8s2bjbsn829kbpx, Create QueryResponse for error on request, msg: status# STATUS_CODE_UNSPECIFIED issues# trace_id# Got NActors::TEvents::TEvPoison [1:3258:4549] [1:3258:4549] Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3258:4549] Got NKikimr::NKqp::NWorkload::TEvCleanupRequest [1:2959:4320] [1:3238:4549] Got NKikimr::NSysView::TEvSysView::TEvCollectQueryStats [1:6014387330472966483:2188150] [1:3238:4549] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3238:4549] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFinishRequestInPool [1:7742373267896299883:25708] [1:2959:4320] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3258:4549] [1:305:2348] Got NKikimr::NKqp::NWorkload::TEvCleanupResponse [1:3238:4549] [1:2959:4320] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3259:4549] [1:68:2115] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3259:4549] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3259:4549] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3259:4549] 2026-01-07T17:25:36.169561Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3259:4549] TxId: 281474976715674. Ctx: { TraceId: 01kecqw2n4c8s2bjbsn829kbpx, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmE3YWQzZmYtYmEwZmYwYTQtOTlmY2MwODMtZTI0YjQ3Mzk=, PoolId: default, IsStreamingQuery: 0}. 
Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3259:4549] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3238:4549] [1:3259:4549] 2026-01-07T17:25:36.169682Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3081} SessionId: ydb://session/3?node_id=1&id=NmE3YWQzZmYtYmEwZmYwYTQtOTlmY2MwODMtZTI0YjQ3Mzk=, ActorId: [1:3238:4549], ActorState: CleanupState, LegacyTraceId: 01kecqw2n4c8s2bjbsn829kbpx, Failed to cleanup issues# trace_id# Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3259:4549] Got NActors::TEvents::TEvPoison [1:3259:4549] [1:3259:4549] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:66:2113] [1:3238:4549] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3259:4549] [1:305:2348] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:3244:4555] [1:66:2113] Got NActors::TEvents::TEvPoison [1:3245:4556] [1:66:2113] Got NActors::TEvents::TEvWakeup [1:20:2067] [0:0:0] Got NActors::TEvents::TEvWakeup [1:97:2143] [0:0:0] |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/executer_actor/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> TestFormatHandler::Watermark [GOOD] >> TestFormatHandler::WatermarkWhere >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |81.9%| [TM] {RESULT} ydb/core/kqp/executer_actor/ut/unittest |81.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/describer/ut/unittest >> TDescriberTests::TopicNotTopic 2026-01-07 17:25:30,182 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 17:25:30,846 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 13243 58.8M 58.9M 32.8M test_tool run_ut @/home/runner/.ya/build/build_root/ec9d/002c76/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args 13330 1.4G 1.4G 910M └─ ydb-core-persqueue-public-describer-ut --trace-path-append /home/runner/.ya/build/build_root/ec9d/002c76/ydb/core/persqueue/public/describer/ut/test-results/unittest/ytes Test command err: 2026-01-07T17:24:42.526322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671886963538716:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:42.527071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:42.555107Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:24:42.869444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:42.902296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:42.902423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:42.931496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:42.975819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:42.978446Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592671886963538613:2081] 1767806682475109 != 1767806682475112 2026-01-07T17:24:43.041691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/002c76/r3tmp/yandexo6Nrtj.tmp 2026-01-07T17:24:43.041719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/002c76/r3tmp/yandexo6Nrtj.tmp 2026-01-07T17:24:43.041878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/002c76/r3tmp/yandexo6Nrtj.tmp 2026-01-07T17:24:43.041976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:43.069372Z INFO: TTestServer started on Port 15423 GrpcPort 5928 2026-01-07T17:24:43.136832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:5928 2026-01-07T17:24:43.257836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:43.323922Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:43.516328Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:44.897926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671895553474240:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:44.898013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671895553474248:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:44.898065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:44.898780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671895553474256:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:44.898937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:44.902344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:24:44.912849Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592671895553474254:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:24:45.042893Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592671895553474321:2640] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:24:45.065659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:45.094575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:45.169080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592671899848441909:2818] 2026-01-07T17:24:47.503722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671886963538716:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:47.503810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-07T17:24:51.032114Z :TopicExists INFO: TTopicSdkTestSetup started 2026-01-07T17:24:51.138304Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:24:51.174402Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592671925618245899:2921] connected; active server actors: 1 2026-01-07T17:24:51.175825Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T17:24:51.179049Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:24:51.226878Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:24:51.230980Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:24:51.231551Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:24:51.231696Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:24:51.231727Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:24:51.231758Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-07T17:24:51.231891Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7592671925618245898:2920], now have 1 active actors on pipe 2026-01-07T17:24:51.231906Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7592671925618245920:2449], now have 1 active actors on pipe 2026-01-07T17:24:51.231961Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2026-01-07T17:24:51.258985Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:24:51.260076Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:24:51.261375Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7592671925618245947:2949], now have 1 active actors on pipe 2026-01-07T17:24:51.272624Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [test-topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:24:51.273185Z node 1 :PERSQUEUE DEBUG: partition.cpp:1324: [72075186224037892][Partition][0][StateInit] HandleOnInit TEvPQ::TEvProposePartitionConfig 2026-01-07T17:24:51.273918Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [test-topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:24:51.274196Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037892][Partition][0][StateInit] bootstrapping 0 [1:7592671925618245950:2450] 2026-01-07T17:24:51.277452Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [test-topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:24:51.277748Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [test-topic:0:Initializer] Initializing completed. 2026-01-07T17:24:51.277768Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'test-topic' partition 0 generation 1 [1:7592671925618245950:2450] 2026-01-07T17:24:51.278023Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [7 ... 
dingWrites: 0 2026-01-07T17:25:30.677041Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:25:30.677055Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:25:30.677065Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:25:30.681374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592672034731465147:2098], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:25:30.681456Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592672034731465147:2098], cacheItem# { Subscriber: { Subscriber: [3:7592672034731465200:2124] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 17 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767806720470 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:25:30.681580Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592672094861008867:3035], recipient# [3:7592672094861008866:2479], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:25:30.682550Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7592672034731465147:2098], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:25:30.682636Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7592672034731465147:2098], cacheItem# { Subscriber: { Subscriber: [3:7592672060501269978:2756] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767806722710 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] 
DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:25:30.682963Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7592672034731465147:2098], cacheItem# { Subscriber: { Subscriber: [3:7592672060501269824:2661] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767806722619 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:25:30.683115Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592672094861008871:3037], recipient# [3:7592672094861008870:2454], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:25:30.685108Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7592672034731465147:2098], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-01-07T17:25:30.685205Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7592672034731465147:2098], cacheItem# { Subscriber: { Subscriber: [3:7592672060501269978:2756] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767806722710 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:25:30.685271Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7592672034731465147:2098], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:25:30.685319Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7592672034731465147:2098], cacheItem# { Subscriber: { Subscriber: [3:7592672060501269824:2661] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767806722619 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:25:30.685423Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592672094861008878:3038], recipient# [3:7592672094861008876:2480], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-01-07T17:25:30.685516Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592672094861008879:3039], recipient# [3:7592672094861008877:2481], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:25:30.777031Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:25:30.777062Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:25:30.777074Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:25:30.777089Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:25:30.777101Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/002c76/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/002c76/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |81.9%| [TS] {RESULT} ydb/core/persqueue/public/describer/ut/unittest >> test_select.py::TestPgSelect::test_as_table >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData |81.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |81.9%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/public/describer/ut/unittest >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> SequenceProxy::DropRecreate [GOOD] >> TestFormatHandler::WatermarkWhere [GOOD] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut >> TestFormatHandler::WatermarkWhereFalse >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [LD] {RESULT} 
$(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2026-01-07T17:25:32.090703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:25:32.091655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:36.627559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:40.701166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-07T17:25:41.762133Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:25:41.798031Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmp32o3o4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:25:41.813691Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmp32o3o4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmp32o3o4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17720662748672948568 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:25:47.805531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:25:47.805609Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:48.059267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:48.838785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-07T17:25:49.241696Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:25:49.242102Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmpBBH7FD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:25:49.242303Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmpBBH7FD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0000f4/r3tmp/tmpBBH7FD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2534474037468795118 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:25:49.699845Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:353) 2026-01-07T17:25:50.007089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |81.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceproxy/ut/unittest |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::Crit 2026-01-07 17:25:49,324 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 17:25:49,435 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 14202 58.6M 58.6M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/ec9d/003612/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/test_tool.args 14271 1.3G 1.3G 1.1G └─ ydb-core-tx-columnshard-splitter-ut --trace-path-append /home/runner/.ya/build/build_root/ec9d/003612/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/ytest.repo Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1320960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1320960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1320960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1320960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1320960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; Traceback (most 
recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/003612/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main
    res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/003612/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {})
|81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/columnshard/splitter/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |81.9%| [TM] {asan, default-linux-x86_64, release}
ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TMLPChangerTests::PartitionNotExists [GOOD] >> TMLPChangerTests::CommitTest >> TestFormatHandler::WatermarkWhereFalse [GOOD] |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> TestJsonParser::Simple1 |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.0%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest |82.0%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> TestJsonParser::Simple2 [GOOD] >> TestJsonParser::Simple3 |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> TestJsonParser::Simple3 [GOOD] >> TestJsonParser::Simple4 >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] >> TestJsonParser::LargeStrings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> TestJsonParser::ManyValues >> TestJsonParser::ManyValues [GOOD] >> TestJsonParser::MissingFields >> TestJsonParser::MissingFields [GOOD] >> TestJsonParser::NestedTypes >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TestJsonParser::NestedTypes [GOOD] >> TestJsonParser::SimpleBooleans >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> TestJsonParser::SimpleBooleans [GOOD] >> TestJsonParser::ChangeParserSchema >> TestJsonParser::ChangeParserSchema [GOOD] >> TestJsonParser::ManyBatches >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> TestJsonParser::ManyBatches [GOOD] >> TestJsonParser::LittleBatches >> TestJsonParser::LittleBatches [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV2 >> TestJsonParser::MissingFieldsValidation >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation >> TMLPChangerTests::CommitTest [GOOD] >> TMLPChangerTests::ReadAndReleaseTest >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation >> TestJsonParser::StringsValidation [GOOD] >> TestJsonParser::NestedJsonValidation |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation >> TestJsonParser::JsonStructureValidation [GOOD] >> TestJsonParser::SkipErrors_Simple1 [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection >> TestJsonParser::SkipErrors_StringValidation >> TestJsonParser::SkipErrors_StringValidation [GOOD] >> TestJsonParser::SkipErrors_NoField >> TestJsonParser::SkipErrors_NoField [GOOD] >> TestJsonParser::SkipErrors_NoJson >> TestJsonParser::SkipErrors_NoJson [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects >> TestJsonParser::SkipErrors_Optional |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TestJsonParser::SkipErrors_Optional [GOOD] >> TestJsonParser::SkipErrors1JsonIn2Messages |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TestJsonParser::SkipErrors1JsonIn2Messages [GOOD] >> TestPurecalcFilter::Simple1 |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.0%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] |82.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TestPurecalcFilter::Simple1 [GOOD] >> TestPurecalcFilter::Simple2 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> TMLPChangerTests::ReadAndReleaseTest [GOOD] >> TMLPChangerTests::CapacityTest [GOOD] >> TMLPReaderTests::TopicNotExists >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding >> KeyValueGRPCService::SimpleAcquireLockV2 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.1%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> TestPurecalcFilter::Simple2 [FAIL] >> TestPurecalcFilter::ManyValues >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> test_select.py::TestPgSelect::test_as_table [GOOD] |82.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:25:49.745307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:49.862576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:49.879931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:49.880459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:49.880569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:50.167401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:50.167548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:50.255191Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767806732144127 != 1767806732144131 2026-01-07T17:25:50.267380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:50.317669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:50.435717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:25:50.812532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:50.828565Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:25:50.958465Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:25:50.958546Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:25:50.958663Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:25:51.347636Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:25:51.347736Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:25:51.348679Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:25:51.348772Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:25:51.349126Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:25:51.349262Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:25:51.349412Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:25:51.349900Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:25:51.351872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:51.353239Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:25:51.353305Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:25:51.385615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:25:51.386713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:25:51.387079Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:25:51.387374Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:25:51.433691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:25:51.434472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:25:51.434594Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:25:51.436461Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:25:51.436539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:25:51.436614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:25:51.436969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:25:51.437108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:25:51.437203Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:25:51.448523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:25:51.506971Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:25:51.507171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:25:51.507279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:25:51.507320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:25:51.507356Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:25:51.507390Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:25:51.507616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:25:51.507664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:25:51.507983Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:25:51.508067Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:25:51.508129Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:25:51.508164Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:25:51.508221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:25:51.508260Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:25:51.508292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:25:51.508319Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:25:51.508362Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:25:51.508476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:25:51.508531Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:25:51.508573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:25:51.509101Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:25:51.509150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:25:51.509243Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:25:51.509458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:25:51.509505Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:25:51.509584Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... Z node 5 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:27:51.035322Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:27:51.035768Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:27:51.036197Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:27:51.045023Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:27:51.045710Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:27:51.046659Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:27:51.047102Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:27:51.047153Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:27:51.047654Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:27:51.047692Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:27:51.047725Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T17:27:51.061216Z node 5 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:27:51.061610Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:27:51.062506Z node 5 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:27:51.063034Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:27:51.077973Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [5:68:2115] Handle TEvNavigate describe path /Root/table-1 2026-01-07T17:27:51.079197Z node 5 :TX_PROXY DEBUG: describe.cpp:270: Actor# [5:1057:2876] HANDLE EvNavigateScheme /Root/table-1 
2026-01-07T17:27:51.083603Z node 5 :TX_PROXY DEBUG: describe.cpp:354: Actor# [5:1057:2876] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:27:51.085001Z node 5 :TX_PROXY DEBUG: describe.cpp:433: Actor# [5:1057:2876] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2026-01-07T17:27:51.090652Z node 5 :TX_PROXY DEBUG: describe.cpp:446: Actor# [5:1057:2876] Handle TEvDescribeSchemeResult Forward to# [5:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-07T17:27:51.099603Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1061:2880], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.100077Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.102849Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1060:2879], serverId# [5:1061:2880], sessionId# [0:0:0] 2026-01-07T17:27:51.104333Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [5:1059:2878], Recipient [5:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-07T17:27:51.117145Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1064:2883], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.117576Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.118807Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1063:2882], serverId# [5:1064:2883], sessionId# [0:0:0] 2026-01-07T17:27:51.119365Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1062:2881], Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-07T17:27:51.120266Z 
node 5 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [5:1062:2881], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2026-01-07T17:27:51.289684Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.549789Z 2026-01-07T17:27:51.290303Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-07T17:27:51.291530Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:1062:2881]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:27:51.294181Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [5:877:2759], Recipient [5:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:27:51.298008Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1071:2889], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.298972Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:27:51.299431Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1070:2888], serverId# [5:1071:2889], sessionId# [0:0:0] 2026-01-07T17:27:51.300477Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1069:2887], Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-07T17:27:51.301324Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:118: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 38], requested from# [5:1069:2887] is not needed |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_background_compaction/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TMLPReaderTests::TopicNotExists [GOOD] >> TMLPReaderTests::TopicWithoutConsumer |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TestPurecalcFilter::ManyValues [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TestPurecalcFilter::NullValues >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> TestPurecalcFilter::NullValues [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus >> TestPurecalcFilter::PartialPush |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.2%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |82.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] >> TMLPReaderTests::TopicWithoutConsumer [GOOD] >> TMLPReaderTests::EmptyTopic >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TestPurecalcFilter::PartialPush [GOOD] >> TestPurecalcFilter::CompilationValidation |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_as_table [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> TestPurecalcFilter::CompilationValidation [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> TestPurecalcFilter::Emtpy >> TestPurecalcFilter::Emtpy [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> TestPurecalcFilter::Watermark >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> 
test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |82.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 >> TestPurecalcFilter::Watermark [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> TMLPReaderTests::EmptyTopic [GOOD] >> TMLPReaderTests::TopicWithData >> TestPurecalcFilter::WatermarkWhere >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> TestPurecalcFilter::WatermarkWhere [GOOD] >> TestPurecalcFilter::WatermarkWhereFalse >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.3%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |82.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/tests/sql/solomon/pytest |82.4%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/ec9d/00288f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit_log.u7hod8_3.txt 2026-01-07T17:28:43.044285Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:43.044250Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-07T17:28:43.000141Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:43.279011Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:43.278978Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-07T17:28:43.210550Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:43.476717Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:43.476690Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-07T17:28:43.452581Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:43.801087Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:43.801053Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T17:28:43.772012Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:44.229328Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:44.229303Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-07T17:28:44.027747Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:44.374473Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:44.374442Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-07T17:28:44.338136Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002892/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log._7hw40xu.txt 2026-01-07T17:28:51.640985Z: {"tx_id":"01kecr24nr6atd7pskt0zy02q6","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:51.640918Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-07T17:28:51.640218Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2026-01-07T17:28:51.833176Z: {"tx_id":"01kecr24nr6atd7pskt0zy02q6","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:51.833121Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T17:28:51.651003Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:28:51.855820Z: {"tx_id":"01kecr24nr6atd7pskt0zy02q6","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:28:51.855780Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-07T17:28:51.840603Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> TestPurecalcFilter::WatermarkWhereFalse [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> TestRawParser::Simple >> TestRawParser::Simple [GOOD] >> TestRawParser::ManyValues >> TestRawParser::ManyValues [GOOD] >> TestRawParser::ChangeParserSchema >> TestRawParser::ChangeParserSchema [GOOD] >> TestRawParser::TypeKindsValidation >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> TestRawParser::TypeKindsValidation [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |82.4%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> 
test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> TMLPReaderTests::TopicWithData [GOOD] >> TMLPReaderTests::TopicWithManyIterationsData >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/00288d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit_log.z5jauqzk.txt |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> test_simple.py::TestSimple::test[alter_table] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.5%| 
[LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> listing_paging.py::TestListingPaging::test_listing_paging_solomon |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> data_paging.py::TestDataPaging::test_data_paging_solomon >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |82.5%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |82.5%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.6%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TMLPReaderTests::TopicWithManyIterationsData [GOOD] >> TMLPReaderTests::TopicWithBigMessage >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV1 |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2026-01-07T17:24:28.863616Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:24:28.864028Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:28.864044Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); 
SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-07T17:24:28.864113Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-07T17:24:28.864405Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7592671826012134427:2051] 2026-01-07T17:24:39.697655Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7592671826012134427:2051] [id 1]: Started compile request 2026-01-07T17:24:41.986418Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7592671826012134427:2051] [id 1]: Compilation completed for request 2026-01-07T17:24:41.989666Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [1:7592671826012134427:2051] 2026-01-07T17:24:41.989789Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-07T17:24:41.989806Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:24:41.989854Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-07T17:24:41.989910Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:24:41.990134Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [1:0:0] 2026-01-07T17:24:41.990147Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; ' (client id: [1:0:0]) 2026-01-07T17:24:41.990176Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 2 2026-01-07T17:24:41.990538Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7592671826012134427:2051] 2026-01-07T17:24:41.990580Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7592671826012134427:2051] [id 2]: Started compile request 2026-01-07T17:24:42.206224Z node 2 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:24:42.206397Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:42.206408Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-07T17:24:42.206434Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-07T17:24:42.206500Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7592671885072248209:2051] 2026-01-07T17:24:44.503271Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [2:7592671885072248209:2051] [id 
1]: Started compile request 2026-01-07T17:24:44.544362Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [2:7592671885072248209:2051] [id 1]: Compilation completed for request 2026-01-07T17:24:44.544510Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [2:7592671885072248209:2051] 2026-01-07T17:24:44.544563Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-07T17:24:44.544597Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:24:44.544623Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-07T17:24:44.544656Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:44.544690Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2026-01-07T17:24:44.726835Z node 3 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:24:44.726992Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:44.727001Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-07T17:24:44.727036Z node 3 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-07T17:24:44.727109Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7592671894029244462:2051] 2026-01-07T17:24:47.188863Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [3:7592671894029244462:2051] [id 1]: Started compile request 2026-01-07T17:24:47.194347Z node 3 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [3:7592671894029244462:2051] [id 1]: Compilation failed for request 2026-01-07T17:24:47.194499Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [3:7592671894029244462:2051] 2026-01-07T17:24:47.194552Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-07T17:24:47.194637Z node 3 :FQ_ROW_DISPATCHER ERROR: purecalc_filter.cpp:375: TProgramCompileHandler: Program compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:3:27: Error: extraneous input '(' expecting {, ';'} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; } } 2026-01-07T17:24:47.542037Z node 4 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-07T17:24:47.542461Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:47.542485Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; ' (client id: [0:0:0]) 2026-01-07T17:24:47.542513Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-07T17:24:47.542588Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [4:7592671908158521726:2051] 2026-01-07T17:24:50.289713Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [4:7592671908158521726:2051] [id 1]: Started compile request 2026-01-07T17:24:50.668888Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [4:7592671908158521726:2051] [id 1]: Compilation completed for request 2026-01-07T17:24:50.668986Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [4:7592671908158521726:2051] 2026-01-07T17:24:50.669073Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-07T17:24:50.669091Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:24:50.669124Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-07T17:24:50.669169Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 1 clients, number rows: 2 2026-01-07T17:24:50.669182Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 2 rows to purecalc filter (client id: [0:0:0]) 2026-01-07T17:24:50.669365Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:24:50.718991Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 1 clients, number rows: 2 2026-01-07T17:24:50.719027Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 2 rows to purecalc filter (client id: [0:0:0]) 2026-01-07T17:24:50.719034Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:24:50.719573Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2026-01-07T17:24:50.719595Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 0 clients, number rows: 2 2026-01-07T17:24:51.384633Z node 5 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2026-01-07T17:24:51.385462Z node 5 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-07T17:24:51.385685Z node 5 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; ' (client id: [0:0:0]) 2026-01-07T17:24:51.385895Z node 5 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-07T17:24:51.386716Z node 5 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [5:7592671924131952570:2051] 2026-01-07T17:24:55.017215Z node 5 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [5:7592671924131952570:2051] [id 1]: Started compile request 2026-01-07T17:24:55.440870Z node 5 :FQ_ROW_DISPATCHER TRACE: compile_se ... ption.h:145: TWithBackTrace<> @ 0x2D216250 2. /tmp//-S/ydb/library/actors/testlib/test_runtime.cpp:1391: DispatchEventsInternal @ 0x2D2120D1 3. /tmp//-S/ydb/library/actors/testlib/test_runtime.cpp:1570: WaitForEdgeEvents @ 0x2D218573 4. /-S/ydb/library/actors/testlib/test_runtime.h:506: GrabEdgeEventIf @ 0xEF9D463 5. /-S/ydb/library/actors/testlib/test_runtime.h:555: GrabEdgeEvent @ 0xEF9CFEF 6. /-S/ydb/library/actors/testlib/test_runtime.h:561: GrabEdgeEvent @ 0xEF9CFEF 7. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:218: CompileProgram @ 0xEF8A42C 8. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:157: MakeProgram @ 0xEF8A42C 9. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:374: Execute_ @ 0xEF08AC6 10. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324: operator() @ 0xEF92627 11. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324:1) &> @ 0xEF92627 12. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324:1) &> @ 0xEF92627 13. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0xEF92627 14. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0xEF92627 15. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0xF90F149 16. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0xF90F149 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0xF90F149 18. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0xF8E69C7 19. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324: Execute @ 0xEF9197C 20. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0xF8E817F 21. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0xF908FAC 22. ??:0: ?? @ 0x7F70EB3C4D8F 23. ??:0: ?? @ 0x7F70EB3C4E3F 24. ??:0: ?? 
@ 0xC8A3028 2026-01-07T17:27:47.134219Z node 42 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:27:47.134529Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:27:47.205183Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [42:7592672679739986554:2051] 2026-01-07T17:28:04.739186Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [42:7592672679739986554:2051] [id 0]: Started compile request 2026-01-07T17:28:05.432737Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [42:7592672679739986554:2051] [id 0]: Compilation completed for request 2026-01-07T17:28:05.433689Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:28:05.434203Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:28:08.639374Z node 43 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 is null, FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:28:08.639648Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:28:08.795698Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [43:7592672773438021627:2051] 2026-01-07T17:28:32.834294Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [43:7592672773438021627:2051] [id 0]: Started compile request 2026-01-07T17:28:33.281271Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [43:7592672773438021627:2051] [id 0]: Compilation completed for request 2026-01-07T17:28:33.281373Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [43:7592672773438021627:2051] 2026-01-07T17:28:33.285219Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:28:33.286163Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-07T17:28:36.051791Z node 44 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 50, FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:28:36.052104Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:28:36.070534Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [44:7592672891375275912:2051] 2026-01-07T17:28:48.769278Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [44:7592672891375275912:2051] [id 0]: Started compile request 2026-01-07T17:28:48.805436Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [44:7592672891375275912:2051] [id 0]: Compilation completed for request 2026-01-07T17:28:48.805526Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for 
request with id 0 from [44:7592672891375275912:2051] 2026-01-07T17:28:48.809423Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:28:48.809539Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-07T17:28:49.840675Z node 45 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2026-01-07T17:28:49.841033Z node 45 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:28:49.896246Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [45:7592672949789183057:2051] 2026-01-07T17:28:58.559923Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [45:7592672949789183057:2051] [id 0]: Started compile request 2026-01-07T17:28:58.573465Z node 45 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [45:7592672949789183057:2051] [id 0]: Compilation failed for request 2026-01-07T17:28:58.573580Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [45:7592672949789183057:2051] 2026-01-07T17:28:59.824049Z node 47 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-07T17:28:59.824376Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:28:59.829216Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [47:7592672990914223581:2051] 2026-01-07T17:29:06.453238Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [47:7592672990914223581:2051] [id 0]: Started compile request 2026-01-07T17:29:06.509564Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [47:7592672990914223581:2051] [id 0]: Compilation completed for request 2026-01-07T17:29:06.509670Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [47:7592672990914223581:2051] 2026-01-07T17:29:06.509914Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:29:06.510034Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:29:07.251644Z node 48 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2026-01-07T17:29:07.258279Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:29:07.260252Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [48:7592673023166277936:2051] 2026-01-07T17:29:11.830070Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [48:7592673023166277936:2051] [id 0]: Started compile request 2026-01-07T17:29:11.861592Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [48:7592673023166277936:2051] [id 0]: Compilation completed for request 2026-01-07T17:29:11.861712Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [48:7592673023166277936:2051] 2026-01-07T17:29:11.865090Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:29:11.865219Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:29:11.865378Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:29:12.397346Z node 49 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(FALSE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-07T17:29:12.397649Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-07T17:29:12.397835Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [49:7592673048364676439:2051] 2026-01-07T17:29:20.427071Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [49:7592673048364676439:2051] [id 0]: Started compile request 2026-01-07T17:29:20.456678Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [49:7592673048364676439:2051] [id 0]: Compilation completed for request 2026-01-07T17:29:20.456832Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [49:7592673048364676439:2051] 2026-01-07T17:29:20.457018Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-07T17:29:20.457103Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-07T17:29:20.457208Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |82.6%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |82.6%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> data_paging.py::TestDataPaging::test_data_paging_solomon [GOOD] >> data_paging.py::TestDataPaging::test_listing_paging_monitoring |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/ec9d/002961/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit_log.zo9vlvky.txt 2026-01-07T17:29:32.689018Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:32.688967Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-07T17:29:32.575638Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:29:33.053353Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:33.053310Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-07T17:29:32.806249Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:29:33.572878Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:33.572822Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-07T17:29:33.180335Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:29:33.931062Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:33.931004Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T17:29:33.688650Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:29:34.215953Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:34.215908Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-07T17:29:34.045869Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-07T17:29:34.466546Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T17:29:34.466508Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2026-01-07T17:29:34.337156Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KeyValueGRPCService::SimpleWriteReadV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |82.7%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |82.7%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |82.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> TMLPReaderTests::TopicWithBigMessage [GOOD] >> TMLPReaderTests::TopicWithKeepMessageOrder >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KeyValueGRPCService::SimpleWriteReadV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] 
{BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> IcbAsActorTests::TestHttpPostReaction >> IcbAsActorTests::TestHttpPostReaction [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 7506597793928286250 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2026-01-07T17:23:29.379257Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-01-07T17:23:29.525994Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] 
NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2026-01-07T17:23:33.383200Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: 
TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2026-01-07T17:23:37.293734Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2026-01-07T17:23:37.555505Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:23:37.556986Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12894246293850143429] 2026-01-07T17:23:37.612569Z 1 00h02m15.161024s :BS_PROXY ERROR: StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3 Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4). GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3" ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult ... 
[1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 3 2026-01-07T17:29:41.502224Z 1 00h25m01.086520s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 1 2026-01-07T17:29:41.868090Z 1 00h25m11.087032s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 964 SEND TEvPut with 
key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 0 2026-01-07T17:29:43.458349Z 9 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:116274:347] ServerId# [1:117318:179] TabletId# 72057594037932033 PipeClientId# [9:116274:347] 2026-01-07T17:29:43.458648Z 8 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:146937:17] ServerId# [1:146945:4109] TabletId# 72057594037932033 PipeClientId# [8:146937:17] 2026-01-07T17:29:43.458821Z 7 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:145868:17] ServerId# [1:145874:3980] TabletId# 72057594037932033 PipeClientId# [7:145868:17] 2026-01-07T17:29:43.458957Z 6 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:122921:17] ServerId# [1:122927:1016] TabletId# 72057594037932033 PipeClientId# [6:122921:17] 2026-01-07T17:29:43.459097Z 5 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:142976:17] ServerId# [1:142983:3601] TabletId# 72057594037932033 PipeClientId# [5:142976:17] 2026-01-07T17:29:43.459239Z 4 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:151863:17] ServerId# [1:151872:4707] TabletId# 72057594037932033 PipeClientId# [4:151863:17] 2026-01-07T17:29:43.459436Z 3 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:141860:17] ServerId# [1:141869:3473] TabletId# 72057594037932033 PipeClientId# [3:141860:17] 2026-01-07T17:29:43.459593Z 2 00h25m41.101024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:150896:17] ServerId# [1:150902:4598] TabletId# 72057594037932033 PipeClientId# [2:150896:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 7 2026-01-07T17:29:46.466334Z 1 00h26m21.103072s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999841} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Starting nodes Start compaction 1 Start checking |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |82.8%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:25:17.368107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:18.367850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:18.392912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:18.398293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:18.401317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:18.401807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:18.412672Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:25:18.415053Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:18.415548Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:18.417265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:20.676170Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:20.794404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:20.794546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:20.795922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:20.796010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:20.843067Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:25:20.843796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:20.844104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:20.937756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:25:21.061966Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... waiting for the first mediator step 2026-01-07T17:25:21.645511Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:25:21.645641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... found first step to be 1500 2026-01-07T17:25:21.951942Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 1500 ... waiting for the next mediator step ... found second step to be 2000 ... read step subscribe result: [1500, 2000] ... read step subscribe update: 3000 2026-01-07T17:25:22.933040Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 3000 ... read step subscribe result: [3000, 3000] ... read step subscribe update: 3500 ... read step subscribe update: 3500 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... read step subscribe update: 7000 ... 
read step subscribe result: [3000, 7000] 2026-01-07T17:25:26.136227Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936129] NodeDisconnected NodeId# 2 2026-01-07T17:25:26.136616Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-07T17:25:26.136646Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 2 2026-01-07T17:25:26.136951Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037968897] NodeDisconnected NodeId# 2 2026-01-07T17:25:26.136975Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-07T17:25:26.138667Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:122:2097] ServerId# [1:1137:2699] TabletId# 72057594037932033 PipeClientId# [2:122:2097] 2026-01-07T17:25:26.145519Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:259:2138] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-07T17:25:26.147327Z node 1 :HIVE WARN: hive_impl.cpp:824: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2026-01-07T17:25:26.148002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2026-01-07T17:25:26.156408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2026-01-07T17:25:26.313551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:26.377258Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:25:26.390120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 ... read step subscribe update: 12000 2026-01-07T17:25:42.795251Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.010058s ... waiting for SysViewsRoster update finished 2026-01-07T17:25:56.256981Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:56.258270Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:56.342829Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:56.357681Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:56.379145Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:616:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:56.381096Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:56.382808Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:56.395594Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:611:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:56.404089Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:56.404190Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:26:00.784828Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:26:01.397225Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:26:01.398031Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:26:01.414043Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:26:01.414142Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:26:01.484594Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:26:01.495105Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:26:01.496371Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:26:01.776069Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:26:01.917793Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:26:04.043174Z node 3 :TX_COORDINATOR DEBUG: coordinator__last_step_subscriptions.cpp:52: Processing TEvSubscribeLastStep from [4:1493:2378] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2026-01-07T17:26:04.142371Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:26:04.143591Z node 3 :TX_CON ... ut responses 2026-01-07T17:30:19.253795Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:30:19.253893Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000000 step# 200 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... coordinator 72057594046316545 gen 2 is planning step 200 2026-01-07T17:30:19.265544Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000000 has been planned 2026-01-07T17:30:19.265735Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:10:1:24576:168:0] response ... 
waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 250 ... starting a new coordinator instance ... waiting for migrated state 2026-01-07T17:30:19.304108Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-07T17:30:19.304662Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-07T17:30:19.350536Z node 20 :TX_COORDINATOR INFO: coordinator__init.cpp:120: tablet# 72057594046316545 CreateTxInit Complete ... blocking state response from [20:536:2394] to [20:873:2744] LastSentStep: 150 LastAcquiredStep: 0 LastConfirmedStep: 150 ... unblocking put responses and requests 2026-01-07T17:30:19.351226Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-07T17:30:19.351626Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000000 stepId# 200 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy 2026-01-07T17:30:19.364686Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:322: tablet# 72057594046382081 server# [20:542:2481] disconnnected 2026-01-07T17:30:19.364834Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:201: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:542:2481] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2026-01-07T17:30:19.396951Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594046382081 server# [20:880:2754] connected 2026-01-07T17:30:19.397306Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2026-01-07T17:30:19.397384Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:876:2752] Cookie# 1 CompleteStep# 150 LatestKnownStep# 150 SubjectiveTime# 102 Coordinator# 72057594046316545 2026-01-07T17:30:19.397584Z node 20 :TX_COORDINATOR NOTICE: coordinator_impl.cpp:412: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2026-01-07T17:30:19.397650Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 200, txid# 10000000 marker# C2 2026-01-07T17:30:19.397777Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:30:19.397844Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000011 step# 250 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 200 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-07T17:30:19.423310Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 200 2026-01-07T17:30:19.423401Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [200] transactions [1] 2026-01-07T17:30:19.423561Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2026-01-07T17:30:19.423770Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2026-01-07T17:30:19.423851Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 200 forTablet# 72057594047365120 txid# 10000000 marker# M3 2026-01-07T17:30:19.423938Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 200 2026-01-07T17:30:19.423991Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 200 2026-01-07T17:30:19.424118Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 200 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}} marker# M4 2026-01-07T17:30:19.424303Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-07T17:30:19.424450Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-07T17:30:19.449469Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:884:2757] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:30:19.449623Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 200 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... 
coordinator 72057594046316545 gen 3 is planning step 250 2026-01-07T17:30:19.461353Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000011 has been planned 2026-01-07T17:30:19.461521Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2026-01-07T17:30:19.462535Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 250, txid# 10000011 marker# C2 2026-01-07T17:30:19.462631Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000011 stepId# 250 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 250 PrevStep: 200 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-07T17:30:19.463003Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 250 2026-01-07T17:30:19.463070Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [250] transactions [1] 2026-01-07T17:30:19.463223Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2026-01-07T17:30:19.463390Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2026-01-07T17:30:19.463479Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 250 forTablet# 72057594047365120 txid# 10000011 marker# M3 2026-01-07T17:30:19.463576Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 250 2026-01-07T17:30:19.463635Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 250 2026-01-07T17:30:19.463781Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 250 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}} marker# M4 2026-01-07T17:30:19.463882Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 250 MediatorId# 72057594046382081 TabletID 72057594047365120} 2026-01-07T17:30:19.463967Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} 2026-01-07T17:30:19.464119Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# 
[20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 300 ... observed step: Step: 300 PrevStep: 250 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-07T17:30:19.494570Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 250 To# 300Steps: {{TCoordinatorStep step# 300 PrevStep# 250}}} marker# M1 2026-01-07T17:30:19.494635Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 300 2026-01-07T17:30:19.494678Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 300 2026-01-07T17:30:19.494741Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} 2026-01-07T17:30:19.494782Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/coordinator/ut/unittest |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |82.8%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.8%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest |82.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> TContinuousBackupWithRebootsTests::Basic |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |82.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 >> TNebiusAccessServiceTest::Authenticate |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut >> TNebiusAccessServiceTest::Authenticate [GOOD] >> test_auditlog.py::test_dynconfig |82.9%| [LD] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut >> IndexBuildTest::RejectsCreate |82.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> VectorIndexBuildTest::Metering_Documentation_Formula [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> KqpPg::InsertFromSelect_Simple+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-07T17:30:27.654506Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d389bce2ed0] Connect to grpc://localhost:31834 2026-01-07T17:30:27.675733Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d389bce2ed0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2026-01-07T17:30:27.696363Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d389bce2ed0] Status 7 Permission Denied 2026-01-07T17:30:27.697119Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d389bce2ed0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2026-01-07T17:30:27.705362Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d389bce2ed0] Response AuthenticateResponse { account { user_account { id: "1234" } } } |82.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> IndexBuildTest::CheckLimitWithDroppedIndexUniq >> KqpCompileFallback::FallbackMechanismWorks >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> IndexBuildTest::Metering_Documentation_Formula [GOOD] >> IndexBuildTest::LockUniq >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot >> TMLPReaderTests::TopicWithKeepMessageOrder [GOOD] >> TMLPWriterTests::TopicNotExists >> KqpPg::ReadPgArray >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> KqpPg::EmptyQuery+useSink |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsCreateUniq |82.9%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |82.9%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] >> KqpPg::NoTableQuery+useSink >> KqpPg::JoinWithQueryService+StreamLookup >> IndexBuildTest::CheckLimitWithDroppedIndexUniq [GOOD] >> IndexBuildTest::Lock >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding >> IndexBuildTest::LockUniq [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] Test command err: Trying to start YDB, gRPC: 8967, MsgBus: 14359 2026-01-07T17:30:26.285146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673362923596790:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:26.285337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:26.711679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:26.732536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:26.732618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:26.739806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:26.937060Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673362923596560:2081] 1767807026223570 != 1767807026223573 2026-01-07T17:30:26.947270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:30:26.999621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:27.102449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:27.102472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:27.102478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:27.102549Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:27.285065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:27.883419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:28.019455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:28.383613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:28.798174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:28.915842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.198744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673384398434923:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:31.198863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:31.199210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673384398434933:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:31.199260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:31.282194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673362923596790:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:31.282270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:31.730596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.787024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.846918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.903833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.961576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:32.056743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:32.116303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:32.188518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:32.332859Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673388693403115:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:32.332953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:32.333285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673388693403121:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:32.333581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673388693403120:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:32.333621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:32.336467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:32.347056Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673388693403124:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:30:32.447068Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673388693403175:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:30:34.262521Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:34.262628Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CD50C1701D8 2026-01-07T17:30:34.267402Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592673397283338085:2536], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kecr58wnfpj7rb24w1461rr6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzYwYzEwYjEtYTAxYjJkYzktZDBiOGJhNTYtYzFlNzdhNDQ=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:30:34.267612Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:34.267681Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592673397283338085:2536], queueSize: 1 2026-01-07T17:30:34.268261Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592673397283338085:2536], compileActor: [1:7592673397283338093:2541] 2026-01-07T17:30:34.268320Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:30:34.268369Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592673397283338093:2541], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-07T17:30:34.268310Z 2026-01-07T17:30:34.384806Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592673397283338093:2541]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767807034","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"8d4ebe2a-911f34e-aa323768-3ddb8a41","version":"1.0"} 2026-01-07T17:30:34.385473Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592673397283338093:2541], duration: 0.117139s 2026-01-07T17:30:34.385509Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592673397283338093:2541], owner: [1:7592673384398434882:2384], status: SUCCESS, issues: , uid: 8d4ebe2a-911f34e-aa323768-3ddb8a41 2026-01-07T17:30:34.385611Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592673397283338085:2536], status: SUCCESS, compileActor: [1:7592673397283338093:2541] 2026-01-07T17:30:34.385671Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592673397283338085:2536], queryUid: 8d4ebe2a-911f34e-aa323768-3ddb8a41, status:SUCCESS |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> IndexBuildTest::RejectsCreateUniq [GOOD] >> IndexBuildTest::RejectsDropIndex |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |83.0%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> DqOutputChannelTests::SingleRead [GOOD] >> DqOutputChannelTests::PopAll [GOOD] >> DqOutputChannelWithStorageTests::Spill [GOOD] >> DqOutputChannelWithStorageTests::Overflow [GOOD] >> DqOutputWideChannelTests::Overflow [GOOD] >> DqOutputWideChannelTests::BigRow |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq >> DqOutputWideChannelTests::BigRow [GOOD] >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] Test command err: Trying to start YDB, gRPC: 5298, MsgBus: 
23720 2026-01-07T17:30:27.782184Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673368402538711:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:27.782330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:27.893785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:28.561043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:28.613787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:28.613906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:28.870703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:28.927153Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:28.944069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:28.945134Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673368402538571:2081] 1767807027773561 != 1767807027773564 2026-01-07T17:30:29.061509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:29.246319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:29.246344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:29.246351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:29.246451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:30.488430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:30.495378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:30.598228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:30.995558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.317748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:31.511421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:32.784257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673368402538711:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:32.784332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:35.087203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673402762278839:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.087356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.097199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673402762278849:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.097318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.741667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.778122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.842787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.907126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.956616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.080156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.161344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.268649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.423507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673407057247038:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:36.423582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:36.424048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673407057247043:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:36.424088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673407057247044:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:36.424344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:36.428560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:36.459863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:30:36.461627Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673407057247047:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:30:36.546938Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673407057247101:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:30:38.844001Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:38.844143Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C4D1F2180A8 2026-01-07T17:30:38.844180Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592673415647182006:2543], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kecr5dbtc1erj3xr7gnn920c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlkOTc5ZTYtYzA2NDFiZDktYmE4NjYxMWQtOTNiZWE5NmQ=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:30:38.844314Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:38.844365Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592673415647182006:2543], queueSize: 1 2026-01-07T17:30:38.845029Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592673415647182006:2543], compileActor: [1:7592673415647182014:2548] 2026-01-07T17:30:38.845084Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:30:38.845114Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592673415647182014:2548], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-07T17:30:38.845074Z 2026-01-07T17:30:39.013889Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592673415647182014:2548]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767807039","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"e290281e-38325037-36cd5f62-4dd9d202","version":"1.0"} 2026-01-07T17:30:39.014414Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592673415647182014:2548], duration: 0.169312s 2026-01-07T17:30:39.014445Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592673415647182014:2548], owner: [1:7592673398467311501:2388], status: SUCCESS, issues: , uid: e290281e-38325037-36cd5f62-4dd9d202 2026-01-07T17:30:39.015530Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592673415647182006:2543], status: SUCCESS, compileActor: [1:7592673415647182014:2548] 2026-01-07T17:30:39.015594Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592673415647182006:2543], queryUid: e290281e-38325037-36cd5f62-4dd9d202, status:SUCCESS |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::RejectsDropIndexUniq |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> DqUnboxedValueToNativeArrowConversion::VariantOverTupleWithOptionals [GOOD] >> HashShuffle::BackPressureInMemory [GOOD] >> HashShuffle::BackPressureInMemoryMulti [GOOD] >> HashShuffle::BackPressureInMemoryLoad |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> 
DqOutputWideChannelTests::ChunkSizeLimit [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> KqpCompileFallback::FallbackMechanismWorks [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorks [GOOD] Test command err: Trying to start YDB, gRPC: 63423, MsgBus: 30159 2026-01-07T17:30:31.054221Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673387397045336:2206];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:31.054296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:31.098713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:31.892113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:31.917166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:31.990976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:32.295538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:32.295903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:32.297616Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:32.319084Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673387397045167:2081] 1767807031021475 != 1767807031021478 2026-01-07T17:30:32.627403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:32.733589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:32.733612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:32.733619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:32.733761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:34.320619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:34.333597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:30:34.449742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:34.860123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.228344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.404052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.055311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673387397045336:2206];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:36.055380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:38.637467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673417461818138:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.637584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.638361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673417461818148:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.638423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.082643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.126771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.170454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.220635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.281281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.354894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.439662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.533119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.665096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421756786323:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.665176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.665642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421756786328:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.665711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421756786329:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.665751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.670145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:39.689520Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673421756786332:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:30:39.769763Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673421756786383:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:30:42.377414Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:42.377536Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C6220054A08 2026-01-07T17:30:42.377577Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592673434641688596:2543], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kecr5gt54zxyqw4qvcvvadz6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmIzMmQxODUtODRlMmE0ZjAtZDNiMDBjZjctMTNmNDQ0N2Y=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:30:42.377720Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:30:42.377778Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592673434641688596:2543], queueSize: 1 2026-01-07T17:30:42.378335Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n 2026-01-07T17:30:42.378379Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592673434641688596:2543], compileActor: [1:7592673434641688604:2548] 2026-01-07T17:30:42.378428Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:30:42.378466Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592673434641688604:2548], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-07T17:30:42.378418Z 2026-01-07T17:30:42.401398Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7592673434641688604:2548], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n " 2026-01-07T17:30:42.712899Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7592673434641688604:2548]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767807042","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"7276d7f6-b12724c1-89ff9dc0-d35e77dd","version":"1.0"} 2026-01-07T17:30:42.713532Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592673434641688604:2548], duration: 0.335083s 2026-01-07T17:30:42.713559Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592673434641688604:2548], owner: [1:7592673417461818097:2389], status: SUCCESS, issues: , uid: 7276d7f6-b12724c1-89ff9dc0-d35e77dd 2026-01-07T17:30:42.716315Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592673434641688596:2543], status: SUCCESS, compileActor: [1:7592673434641688604:2548] 2026-01-07T17:30:42.716391Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592673434641688596:2543], queryUid: 7276d7f6-b12724c1-89ff9dc0-d35e77dd, status:SUCCESS |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |83.0%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> IndexBuildTest::RejectsDropIndexUniq [GOOD] >> IndexBuildTest::RejectsCancelUniq >> IndexBuildTest::IndexPartitioningIsPersistedUniq [GOOD] >> IndexBuildTest::DropIndex |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::DropIndexUniq |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_auditlog.py::test_dynconfig [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> TMLPWriterTests::TopicNotExists [GOOD] >> TMLPWriterTests::EmptyWrite >> TTicketParserTest::TicketFromCertificateWithValidationGood |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TTicketParserTest::BulkAuthorizationRetryError |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> IndexBuildTest::DropIndexUniq [GOOD] >> TTicketParserTest::LoginRefreshGroupsWithError |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndexUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:31.043041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:31.043135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:31.043186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:31.043230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:31.043287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:31.043323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:31.043396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:31.043468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:31.044286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:31.044600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:31.185884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:31.185954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:31.205128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:31.205482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:31.205652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:31.212803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:31.213152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:31.213886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:31.214145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:31.216950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:31.217146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:31.218364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:31.218437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:31.218556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:31.218607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:31.218725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:31.218923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:31.407054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.408873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.409015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:30:31.409123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:31.409205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 594046678944, cookie: 105 2026-01-07T17:30:52.254518Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:30:52.254553Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-07T17:30:52.254586Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 5 2026-01-07T17:30:52.255251Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.255334Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.255364Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:30:52.255393Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-07T17:30:52.255421Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-07T17:30:52.257116Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.257207Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.257243Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 
2026-01-07T17:30:52.257273Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-07T17:30:52.257304Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:30:52.258986Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.259090Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.259135Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:30:52.259705Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.259786Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.259814Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:30:52.260239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:30:52.260285Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:30:52.260528Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-07T17:30:52.260636Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-07T17:30:52.260666Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-07T17:30:52.260699Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-07T17:30:52.260724Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-07T17:30:52.260755Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2026-01-07T17:30:52.261594Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.261674Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:30:52.261701Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:30:52.261733Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-07T17:30:52.261769Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 4 2026-01-07T17:30:52.261848Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2026-01-07T17:30:52.264690Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2026-01-07T17:30:52.264753Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:30:52.264985Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-07T17:30:52.265149Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-07T17:30:52.265181Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-07T17:30:52.265217Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-07T17:30:52.265244Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-07T17:30:52.265277Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2026-01-07T17:30:52.265337Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:821:2776] message: TxId: 105 2026-01-07T17:30:52.265386Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-07T17:30:52.265426Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:30:52.265453Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:30:52.265548Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:30:52.265586Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:1 
2026-01-07T17:30:52.265608Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:1 2026-01-07T17:30:52.265637Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 2 2026-01-07T17:30:52.265662Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:2 2026-01-07T17:30:52.265682Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:2 2026-01-07T17:30:52.265717Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-07T17:30:52.266036Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.266153Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.266288Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.266321Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.266356Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.268849Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:30:52.270114Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:30:52.270161Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1349:3270] TestWaitNotification: OK eventTxId 105 |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> IndexBuildTest::RejectsCancelUniq [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq >> TSchemeShardSecretTest::DefaultDescribeSecret >> TSchemeShardSecretTest::CreateSecret >> TSchemeShardSecretTest::CreateNotInDatabase >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode >> TSchemeShardSecretTest::DropSecret >> TSchemeShardSecretTest::CreateSecretInSubdomain >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:55.305146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:55.305266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:55.305310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:55.305359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:55.305397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:55.305426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:55.305504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:55.305584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:55.306460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:55.306803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:55.398963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:55.399040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:55.416427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:55.416983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:55.417266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:55.424699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:55.425132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:55.425846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:55.426160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:55.429442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:55.429675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:55.430928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:55.430994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-07T17:30:55.431116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:55.431166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:55.431260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:55.431463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:55.567339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.570955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571442Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.571998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:55.572077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... : true 2026-01-07T17:30:58.904775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:30:58.904948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-07T17:30:58.906321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:30:58.906634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:58.906751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:30:58.906802Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:30:58.906944Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-07T17:30:58.907112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:30:58.907172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:30:58.909381Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:58.909419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:30:58.909544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:30:58.909655Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:58.909685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:848:2805], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:30:58.909720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:848:2805], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:30:58.909770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:30:58.909806Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:30:58.909906Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:30:58.909941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:58.909977Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:30:58.910012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:58.910051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:30:58.910089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:58.910124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:30:58.910154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:30:58.910219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:30:58.910257Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-07T17:30:58.910293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-07T17:30:58.910320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:30:58.911015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:58.911083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:58.911119Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:30:58.911156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:30:58.911192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:30:58.912431Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:58.912501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:58.912545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:30:58.912577Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:30:58.912605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:30:58.912673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:30:58.916027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:30:58.916312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:30:58.916571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:30:58.916623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:30:58.917062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:30:58.917145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:30:58.917181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:880:2835] TestWaitNotification: OK eventTxId 103 2026-01-07T17:30:58.917628Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2026-01-07T17:30:58.917790Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 188us result status StatusSuccess 2026-01-07T17:30:58.918055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] >> TSchemeShardSecretTest::DropNotASecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:56.189006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:56.189125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:56.189178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:56.189227Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:56.189267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:56.189312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:56.189411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:56.189505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:56.190464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:56.190789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:56.280085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:56.280154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:56.296291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:56.296698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:56.296951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:56.303696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:56.304075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:56.304878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:56.305177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:56.312438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:56.312685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:56.314018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:56.314092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:56.314230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:56.314288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-07T17:30:56.314404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:56.314598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:56.466498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.467950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:30:56.468042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.468598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 01-07T17:30:59.069863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-07T17:30:59.069894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:30:59.070486Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:30:59.070571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:30:59.070607Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:30:59.070673Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:30:59.070726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:30:59.071150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:30:59.071224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:30:59.071255Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:30:59.071286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:30:59.071317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:30:59.071394Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:30:59.074775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:30:59.074866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:30:59.075095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:30:59.075159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:30:59.075531Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:30:59.075621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:30:59.075663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:739:2728] TestWaitNotification: OK eventTxId 102 2026-01-07T17:30:59.076090Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:30:59.076296Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 228us result status StatusSuccess 2026-01-07T17:30:59.076622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-07T17:30:59.079820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose 
Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:30:59.080089Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2026-01-07T17:30:59.080156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2026-01-07T17:30:59.080402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:30:59.082639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-07T17:30:59.082920Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:30:59.083257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:30:59.083346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:30:59.083717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:30:59.083814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:30:59.083851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:747:2736] TestWaitNotification: OK eventTxId 103 2026-01-07T17:30:59.084298Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-07T17:30:59.084497Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 233us result status StatusSuccess 2026-01-07T17:30:59.084810Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:56.513685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:56.513805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:56.513851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:56.513893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:56.513932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:56.513961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:56.514030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:30:56.514111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:56.515006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:56.515265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:56.607696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:56.607761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:56.626622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:56.627002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:56.627242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:56.638617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:56.638920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:56.639597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:56.639819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:56.643857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:56.644029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:56.644878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:56.644921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:56.645043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:56.645080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:56.645150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:56.645282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:56.815593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.816949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:56.817595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eration IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:30:59.211762Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:30:59.211783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:30:59.211805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-07T17:30:59.211835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:30:59.211877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:30:59.212074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:30:59.212154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:30:59.212188Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-07T17:30:59.212208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-07T17:30:59.212258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:30:59.212283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-07T17:30:59.212323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-07T17:30:59.212359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:30:59.212387Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-07T17:30:59.212429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:30:59.212459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-07T17:30:59.212482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-07T17:30:59.212502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-07T17:30:59.216716Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.216845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.216885Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:30:59.217007Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:30:59.217062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:30:59.218095Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.218191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.218224Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:30:59.218286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-07T17:30:59.218321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:30:59.220001Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.220101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.220138Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:30:59.220166Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:30:59.220199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:30:59.220686Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.220757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 
3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:30:59.220787Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:30:59.220823Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:30:59.220850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:30:59.220902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:30:59.223788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:30:59.223988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:30:59.224312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:30:59.225442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:30:59.225629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:30:59.225669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:30:59.225935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:30:59.226018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:30:59.226046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:729:2718] TestWaitNotification: OK eventTxId 101 2026-01-07T17:30:59.226412Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-07T17:30:59.226569Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 190us result status StatusSuccess 2026-01-07T17:30:59.226825Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:57.094995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:57.095069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.095099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:57.095128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:57.095152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:57.095173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:57.095212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.095278Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:57.095912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:57.096144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:57.206617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:57.206665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:57.236114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:57.236552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:57.236744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:57.245791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:57.246125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:57.246790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:57.247004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:57.249751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:57.249937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:57.250997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:57.251056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:57.251167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:57.251209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:57.251321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:57.251486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:57.440102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.445984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.446043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.446124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.446223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.446316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:57.446398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
4: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:59.599365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-07T17:30:59.599406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:30:59.599447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:30:59.599718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:30:59.599765Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:30:59.599856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:30:59.599887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:59.599921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:30:59.599948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:59.599980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:30:59.600018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:30:59.600093Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:30:59.600148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:30:59.600225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:30:59.600273Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-07T17:30:59.600310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:30:59.600339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:30:59.600372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-07T17:30:59.601537Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.601633Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.601670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:30:59.601707Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:30:59.601754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:30:59.602848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.602931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.602959Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:30:59.602986Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:30:59.603015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:30:59.604046Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.604115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:30:59.604146Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:30:59.604172Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:30:59.604198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:30:59.604275Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:30:59.604672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:30:59.604712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:30:59.604784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:30:59.605974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:30:59.608294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:30:59.608394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:30:59.609314Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2026-01-07T17:30:59.609756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:30:59.609813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2026-01-07T17:30:59.609885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:30:59.609907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:30:59.610347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:30:59.610444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:30:59.610505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:30:59.610545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:770:2759] 2026-01-07T17:30:59.610692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:30:59.610715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:770:2759] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2026-01-07T17:30:59.611191Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:30:59.611356Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 190us result status StatusPathDoesNotExist 2026-01-07T17:30:59.611521Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:57.730747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:57.730839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.730879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:57.730916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:57.730949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:57.730975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:57.731029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.731096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:57.731839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:57.732111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:57.860760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:57.860813Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:57.883122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:57.883561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:57.883758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:57.890123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:57.890452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:57.891282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:57.891502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:57.894193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:57.894369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:57.895416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:57.895473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:57.895582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:57.895630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:57.895728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:57.895895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:58.043883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.044806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.044905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.045822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:00.020080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:31:00.020154Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:00.020183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:31:00.020216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:31:00.020392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:31:00.020435Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:31:00.020524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:31:00.020557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:00.020593Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:31:00.020624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:00.020659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:31:00.020704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:00.020735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:31:00.020765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:31:00.020828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:31:00.020861Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:31:00.020893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:31:00.020945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:31:00.021883Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:00.021962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:00.021992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:31:00.022031Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:31:00.022069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:31:00.022438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:00.022497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:00.022520Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:31:00.022544Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:31:00.022573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:31:00.022640Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:31:00.030204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:31:00.030315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:31:00.030501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:31:00.030540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:31:00.030873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:31:00.030954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:31:00.030986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: 
tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:704:2693] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:31:00.033589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:31:00.033702Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2026-01-07T17:31:00.033806Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:31:00.035653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:00.035861Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:31:00.036138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:31:00.036175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:31:00.036527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:31:00.036619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:31:00.036648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:711:2700] TestWaitNotification: OK eventTxId 102 2026-01-07T17:31:00.037048Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-07T17:31:00.037204Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 174us result status StatusSuccess 2026-01-07T17:31:00.037446Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:57.954574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:57.954639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.954669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:57.954699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:57.954742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:57.954763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:57.954811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:57.954875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:57.955678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:57.955968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:58.044688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:58.044753Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:58.083210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:58.083569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:58.083810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:58.098775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:58.099119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:58.099761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:58.099956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:58.106548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:58.106746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:58.107757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:58.107812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:58.107914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:58.107959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:58.108055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:58.108214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:58.266977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.267804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.267904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.267976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:58.268748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:00.210018Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 124us result status StatusSuccess 2026-01-07T17:31:00.210276Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:00.210672Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:00.210767Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 109us result status StatusSuccess 2026-01-07T17:31:00.211143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" 
PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:00.211525Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:00.211622Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 113us result status StatusSuccess 2026-01-07T17:31:00.211924Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: 
true } Children { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:00.212385Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:00.212507Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 135us result status StatusSuccess 2026-01-07T17:31:00.212849Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |83.1%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:28.854974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:28.855062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:28.855101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:28.855142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:28.855179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:28.855206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:28.855263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:28.855339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:28.866501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:28.866858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:28.956416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:28.956496Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:28.971766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:28.972135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:28.972301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:28.978578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:28.978916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:28.979543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:28.979798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:28.982370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:28.982551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:28.983684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:28.983739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:28.983832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:28.983886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:28.983972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:28.984181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:29.167000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.167992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.168970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.169055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:29.169144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... DROP LOCK, path: /MyRoot/ServerLessDB/Table/test_index/indexImplTable 2026-01-07T17:31:00.469306Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976725762, status# StatusPathDoesNotExist 2026-01-07T17:31:00.469387Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549 2026-01-07T17:31:00.469503Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3152: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 107, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-07T17:31:00.469684Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3157: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500), SubscribersCount: 1, CreateSender: [6:1668:3539], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), 
drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-07T17:31:00.469871Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:31:00.472284Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected 2026-01-07T17:31:00.472460Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/Table/test_index', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760, SubscribersCount: 1, CreateSender: [6:1668:3539], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:31:00.472526Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-07T17:31:00.472679Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:31:00.472730Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:1685:3556] TestWaitNotification: OK eventTxId 107 2026-01-07T17:31:00.473560Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-07T17:31:00.473891Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: 
STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-07T17:31:00.474776Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:31:00.475056Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 301us result status StatusSuccess 2026-01-07T17:31:00.475605Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 
72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:31:00.476470Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 108 DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-07T17:31:00.476786Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:102: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 108 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 108 Status: SUCCESS 2026-01-07T17:31:00.482969Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" PageSize: 100 PageToken: "" 2026-01-07T17:31:00.483105Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingObject >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> TSchemeShardSecretTest::EmptySecretName >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TCertificateCheckerTest::CheckSubjectDns >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> TTicketParserTest::AuthorizationRetryError >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TTicketParserTest::NebiusAuthenticationUnavailable >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret |83.1%| 
[TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:31:02.945796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:31:02.945897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:02.945927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:31:02.945958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:31:02.945984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:31:02.946006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:31:02.946064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:02.946130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:31:02.946965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:31:02.947196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:31:03.023570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:31:03.023614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:03.042460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:31:03.042861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:31:03.043110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:31:03.054728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:31:03.055180Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:31:03.056088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:03.056457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:31:03.063521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.063798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:31:03.065331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:03.065416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.065572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:31:03.065633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:03.065777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:31:03.066025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:31:03.227485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.228654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.228803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.228892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.228995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229593Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.229856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ration: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:31:05.304007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:31:05.304034Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:31:05.304060Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 6 2026-01-07T17:31:05.304087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:31:05.305235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:31:05.305307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:31:05.305333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:31:05.305357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2026-01-07T17:31:05.305383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:31:05.305456Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:31:05.308771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:31:05.308888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:31:05.308969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:31:05.309235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:31:05.309279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:31:05.309665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 
2026-01-07T17:31:05.309764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:31:05.309801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:850:2839] TestWaitNotification: OK eventTxId 105 2026-01-07T17:31:05.310321Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:05.310525Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 251us result status StatusSuccess 2026-01-07T17:31:05.310837Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:05.311331Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:05.311465Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 149us result status StatusSuccess 2026-01-07T17:31:05.311728Z 
node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:05.312159Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:05.312312Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 173us result status StatusSuccess 2026-01-07T17:31:05.312694Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 
CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:31:03.029809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:31:03.029908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:03.029946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:31:03.029978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:31:03.030007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:31:03.030035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:31:03.030087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:03.030156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:31:03.030954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:31:03.031247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:31:03.113797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:31:03.113857Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:03.129110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:31:03.129486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:31:03.129742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:31:03.139373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:31:03.139803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:31:03.140605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:03.140883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:31:03.144535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.144774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:31:03.146013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:03.146083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.146214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:31:03.146258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:03.146368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:31:03.146551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:31:03.296005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.296916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.297970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1:05.222499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:31:05.222666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:31:05.225758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:31:05.225894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:31:05.226237Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:05.226384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:05.226434Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:31:05.226692Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:31:05.226759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:31:05.226932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:31:05.227007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:31:05.234047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:05.234162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:05.234419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:05.234472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:31:05.234885Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:31:05.234947Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:31:05.235072Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:31:05.235112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:31:05.235157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:31:05.235192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:31:05.235235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:31:05.235281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:31:05.235324Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:31:05.235357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:31:05.235463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:31:05.235515Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:31:05.235561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:31:05.236171Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:31:05.236300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:31:05.236347Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:31:05.236392Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:31:05.236443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:31:05.236536Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:31:05.243342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:31:05.243839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:05.244300Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:684:2673] Bootstrap 2026-01-07T17:31:05.245305Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:684:2673] Become StateWork (SchemeCache [2:689:2678]) 2026-01-07T17:31:05.245699Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:31:05.245909Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 237us result status StatusPathDoesNotExist 2026-01-07T17:31:05.246109Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:31:05.246659Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:684:2673] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:31:05.248622Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2026-01-07T17:31:05.251988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:31:05.252303Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2026-01-07T17:31:05.252428Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-07T17:31:05.254718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2026-01-07T17:31:05.254969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:31:05.255304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:31:05.255355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:31:05.255723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:31:05.255842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:31:05.255885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:700:2689] TestWaitNotification: OK eventTxId 101 |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TMLPWriterTests::EmptyWrite [GOOD] >> TMLPWriterTests::WriteOneMessage >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService >> TTicketParserTest::LoginGood >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:31:02.763071Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:31:02.763175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:02.763219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:31:02.763262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:31:02.763308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:31:02.763340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:31:02.763412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:02.763513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:31:02.764413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:31:02.764702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:31:02.853790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:31:02.853852Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:02.869815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:31:02.870298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:31:02.870635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:31:02.882061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:31:02.882432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:31:02.883100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:02.883308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:31:02.886464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:02.886731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:31:02.887891Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:02.887955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:02.888093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:31:02.888152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:02.888261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:31:02.888462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:31:03.015097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.019981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.020036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:06.353491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:31:06.353597Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:06.353638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:31:06.353685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:31:06.353966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:31:06.354026Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:31:06.354161Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:31:06.354217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:06.354276Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:31:06.354325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:06.354374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:31:06.354430Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:31:06.354481Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:31:06.354530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 
101:0 2026-01-07T17:31:06.354625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:31:06.354678Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:31:06.354721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:31:06.354766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:31:06.355627Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:06.355726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:06.355762Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:31:06.355821Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:31:06.355871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:31:06.356526Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:06.356624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:31:06.356662Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:31:06.356695Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:31:06.356731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:31:06.356810Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:31:06.362598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:31:06.363084Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:31:06.363347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:31:06.363406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:31:06.363837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:31:06.363968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:31:06.364019Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:713:2702] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:31:06.368239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:31:06.368417Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2026-01-07T17:31:06.368560Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:31:06.371321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:31:06.371640Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:31:06.372015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:31:06.372071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:31:06.372513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:31:06.372632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got 
EvNotifyTxCompletionResult 2026-01-07T17:31:06.372679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:720:2709] TestWaitNotification: OK eventTxId 102 2026-01-07T17:31:06.373211Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-07T17:31:06.373420Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 246us result status StatusSuccess 2026-01-07T17:31:06.373901Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading >> Yq_1::ListConnections |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:31:03.545289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:31:03.545384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:03.545424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:31:03.545464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:31:03.545501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:31:03.545529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:31:03.545579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:31:03.545651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:31:03.546453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:31:03.546707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:31:03.628834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:31:03.628892Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:03.643690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:31:03.643984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:31:03.644257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:31:03.650661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:31:03.651100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:31:03.651857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:03.652115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:31:03.658924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.659194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:31:03.660318Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:03.660384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:03.660509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:31:03.660559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:31:03.660648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:31:03.660821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:31:03.807403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.808452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.808699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.808814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.808911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.808993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:31:03.809683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... n_alter_secret.cpp:31: [72057594046678944] TAlterSecret TPropose operationId# 103:0 ProgressState 2026-01-07T17:31:06.988151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-07T17:31:06.988290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:31:06.989406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-07T17:31:06.989592Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter), operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2026-01-07T17:31:06.990702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:31:06.990846Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-07T17:31:06.991153Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:31:06.991286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 
72057594046316545 AckTo { RawX1: 137 RawX2: 12884904046 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:31:06.991344Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000040 2026-01-07T17:31:06.991470Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-07T17:31:06.991670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:31:06.993502Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:31:06.993555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:31:06.993718Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:31:06.993761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:31:06.994082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:31:06.994138Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:31:06.994263Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:31:06.994296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:31:06.994336Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:31:06.994385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:31:06.994429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:31:06.994471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:31:06.994508Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:31:06.994540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:31:06.994616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:31:06.994658Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-07T17:31:06.994699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-07T17:31:06.995285Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:31:06.995382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:31:06.995437Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:31:06.995476Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:31:06.995523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:31:06.995610Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:31:06.998256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-07T17:31:06.998511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:31:06.998550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-07T17:31:06.998626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:31:06.998645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:31:06.999069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:31:06.999160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:31:06.999200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:757:2746] 2026-01-07T17:31:06.999289Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:31:06.999401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:31:06.999424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:757:2746] TestWaitNotification: OK 
eventTxId 103 TestWaitNotification: OK eventTxId 104 2026-01-07T17:31:06.999856Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-07T17:31:07.000055Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 240us result status StatusSuccess 2026-01-07T17:31:07.000395Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> PrivateApi::PingTask >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> TContinuousBackupWithRebootsTests::Basic [GOOD] >> Yq_1::CreateQuery_With_Idempotency >> Yq_1::DescribeJob >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> Yq_1::DescribeConnection |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> 
Yq_1::ModifyConnections >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |83.2%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |83.2%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> Yq_1::Basic_Null >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> Yq_1::DeleteConnections >> Yq_1::Basic >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> TTicketParserTest::LoginBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::Basic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:113:2144] 2026-01-07T17:30:27.009981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:27.010136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:27.010183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:27.010217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:27.010254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2026-01-07T17:30:27.010283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:27.010338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:27.010403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:27.011236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:27.011544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:27.290891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:30:27.290981Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:27.291952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:27.305373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:27.310554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:27.310757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:27.318958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:27.319309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:27.320002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:27.320248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:27.324231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:27.324431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:27.325694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:27.325761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:27.325915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:27.325963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-07T17:30:27.326005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:27.326140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:27.526903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.527983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:30:27.528590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.528841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.563163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.563371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.563461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:27.563539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:31:08.669809Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 6 2026-01-07T17:31:08.669850Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:31:08.670430Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:31:08.670476Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:31:08.670506Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-07T17:31:08.670540Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:31:08.670616Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-07T17:31:08.670867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:31:08.670899Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:31:08.670932Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-07T17:31:08.671960Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:31:08.679009Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:31:08.679137Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:31:08.679315Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:31:08.679625Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:31:08.684111Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:31:08.684761Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:31:08.684830Z node 16 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:31:08.685023Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:31:08.685082Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:31:08.685146Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:31:08.685197Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:31:08.685248Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-07T17:31:08.685308Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:31:08.685372Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:31:08.685421Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:31:08.685585Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:31:08.685639Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-07T17:31:08.685668Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-07T17:31:08.685707Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:31:08.685736Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-07T17:31:08.685766Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-07T17:31:08.685834Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:31:08.686373Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:31:08.686629Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:31:08.686691Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:31:08.686800Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:31:08.686867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at 
schemeshard: 72057594046678944 2026-01-07T17:31:08.686915Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:31:08.687184Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:31:08.687238Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:31:08.691067Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:31:08.691515Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:31:08.691579Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:31:08.692220Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:31:08.692353Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:31:08.692404Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [16:1158:3074] TestWaitNotification: OK eventTxId 104 2026-01-07T17:31:08.693234Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:31:08.693519Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 342us result status StatusPathDoesNotExist 2026-01-07T17:31:08.693726Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:31:08.694396Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:31:08.694639Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 275us result status StatusPathDoesNotExist 2026-01-07T17:31:08.694821Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> Yq_1::CreateConnection_With_Existing_Name |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/00293f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_dynconfig/audit_log.y8b8j4im.txt 2026-01-07T17:30:50.406464Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |83.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2026-01-07T17:30:51.539500Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673472020461672:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:51.550687Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:51.814671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:51.814782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:51.872438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:51.875028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:51.929766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:52.002819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:52.002847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:52.002856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:52.002950Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:52.120821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:52.268309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:52.275012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:52.281940Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket D604657B533D38C809BB5AE3A8AC254643B59CAB62B3D7F04437F4608E177EDE () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-07T17:30:55.891824Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673490112152720:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:55.915414Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:55.948355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:56.135755Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673490112152676:2081] 1767807055863021 != 1767807055863024 2026-01-07T17:30:56.153986Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:56.154070Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:56.154602Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:56.155725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:56.294355Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:56.366242Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:30:56.366324Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:30:56.401718Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:56.401744Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:56.401753Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:56.401852Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:56.489855Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:56.626227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:56.632710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:56.635034Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket CD08328347B32EAA054B323C0198F583872B52BB8F1E703486BD3807AD6E0720 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2026-01-07T17:30:56.635613Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket CD08328347B32EAA054B323C0198F583872B52BB8F1E703486BD3807AD6E0720: Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers 2026-01-07T17:30:59.788513Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673504342550481:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:59.788676Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:59.822675Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:59.972045Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:59.973140Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:59.973534Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:59.977078Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673504342550374:2081] 1767807059779225 != 1767807059779228 2026-01-07T17:30:59.983900Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:59.995059Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:31:00.007647Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:31:00.062001Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:00.089501Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:00.089527Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:00.089536Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:00.089610Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:00.167622Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:00.292761Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:00.301240Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 000A35AE0160E6B03977056FC8B6FBB54700C377506D3563400ED02F2A0F2DD5 () has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2026-01-07T17:31:00.301802Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 000A35AE0160E6B03977056FC8B6FBB54700C377506D3563400ED02F2A0F2DD5: Cannot create token from certificate. Client certificate failed verification 2026-01-07T17:31:03.519268Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673522552614242:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:03.519340Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:03.601872Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:03.617934Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:03.618974Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592673522552614217:2081] 1767807063518420 != 1767807063518423 2026-01-07T17:31:03.656636Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:03.656722Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:03.663233Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:03.718776Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:03.718795Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:03.718808Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:03.718893Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:03.892697Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:03.927627Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:03.939729Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-07T17:31:03.939817Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb2593cbad0] Connect to grpc://localhost:11519 2026-01-07T17:31:03.943616Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2593cbad0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: 
"something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-07T17:31:03.958853Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb2593cbad0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-07T17:31:03.959666Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-07T17:31:03.959775Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:03.961545Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-07T17:31:03.961819Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2593cbad0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-07T17:31:03.963880Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb2593cbad0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-07T17:31:03.964027Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-07T17:31:03.964084Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2026-01-07T17:31:07.332551Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673540993726832:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:07.345356Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:07.360745Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:07.455333Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:07.466938Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:07.467023Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:07.481382Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:07.586068Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:07.652671Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:07.652692Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:07.652701Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:07.652781Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:07.894208Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:07.905292Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-07T17:31:07.905360Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb2593bec50] Connect to grpc://localhost:1506 2026-01-07T17:31:07.906315Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2593bec50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-07T17:31:07.918203Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb2593bec50] Status 14 Service Unavailable 2026-01-07T17:31:07.918826Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** 
(8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:07.918852Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:07.918882Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:07.918965Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-07T17:31:07.919461Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2593bec50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-01-07T17:31:07.922636Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb2593bec50] Status 1 CANCELLED 2026-01-07T17:31:07.922933Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-07T17:31:07.922957Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-01-07T17:31:07.922977Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 >> Initializer::Simple >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> 
KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2026-01-07T17:30:52.849381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673477973364020:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:52.849444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:53.396040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:53.416093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:53.416211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:53.418139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:53.533996Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673477973363908:2081] 1767807052834204 != 1767807052834207 2026-01-07T17:30:53.598483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:53.643684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:53.653932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:53.653954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:53.653960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:53.654059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:53.857171Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:30:53.948337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:53.975361Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 93B2A2492D3B4597B0B7057CEC5AE4E31EF911E4BDD51F64000B4D1311EFB8A6 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-07T17:30:57.524499Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673497321975351:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:57.524977Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:57.569556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:57.717841Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:57.717923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:57.718146Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:57.720200Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673497321975139:2081] 1767807057430926 != 1767807057430929 2026-01-07T17:30:57.735362Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:57.737819Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:57.946035Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:57.946051Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:57.946056Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:57.946137Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:57.957846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:58.280161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:58.287241Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:30:58.290244Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 1D1415D131E42D681BCA18EBD47A2D4781AB2B2143277D416A681F7CF5C6B3B4 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-07T17:31:01.834929Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673516088045823:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:01.834971Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:01.875438Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:01.934894Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:01.988552Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:01.988631Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:01.993918Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:02.056746Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:02.056772Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:02.056779Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:02.056884Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:02.104350Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:02.303975Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:02.311973Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket D2975AB8EFEC3A43D3AF83AB50DE2D682D58E1183A055CE1B03B93154FF53917 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-07T17:31:02.312695Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket D2975AB8EFEC3A43D3AF83AB50DE2D682D58E1183A055CE1B03B93154FF53917: Cannot create token from certificate. 
Client certificate failed verification 2026-01-07T17:31:05.848986Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673529833773600:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:05.851249Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:05.854815Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:05.961741Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:05.963419Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:05.968709Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592673529833773433:2081] 1767807065842825 != 1767807065842828 2026-01-07T17:31:05.974808Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:05.974888Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:05.993278Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:06.065226Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:06.065249Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:06.065261Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:06.065333Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:06.212716Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:06.268463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:06.285593Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket B1322F56EF13FE8A79188B4599A75950FE2DEB91097F30F863330055D1CC3E55 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-07T17:31:10.281471Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673552492493320:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.282437Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:10.333113Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.465888Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:10.469539Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673552492493285:2081] 1767807070244416 != 1767807070244419 2026-01-07T17:31:10.481327Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:10.481417Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:10.503085Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:10.517002Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:10.644203Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:10.644237Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:10.644246Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:10.644321Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:10.934578Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:10.946447Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 8F43504CAEEB761C9FFA6E5DEB88A3F97312A105EF5A43F929C44D1CFFC13184 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-07T17:31:10.947320Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 8F43504CAEEB761C9FFA6E5DEB88A3F97312A105EF5A43F929C44D1CFFC13184: Cannot create token from certificate. 
Client certificate failed verification |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> LocalPartitionReader::Booting >> LocalPartitionReader::Booting [GOOD] >> TSubscriberTest::NotifyUpdate |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::ReconnectOnFailure |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> LocalPartitionReader::Retries >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> LocalPartitionReader::Retries [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Retries [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2026-01-07T17:31:17.574819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:17.577171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-07T17:31:17.577312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-07T17:31:17.577362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-07T17:31:17.577427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-07T17:31:17.577525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-07T17:31:17.577574Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:17.577642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-07T17:31:17.577690Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:17.578290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-07T17:31:17.578364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2026-01-07T17:31:17.578423Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: 
there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.058681Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][4:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:18.059793Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-07T17:31:18.059984Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-07T17:31:18.060062Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-07T17:31:18.060167Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-07T17:31:18.060246Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-07T17:31:18.060283Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:36:2066][path] Set up state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.060380Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-07T17:31:18.060431Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.060898Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-07T17:31:18.060989Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.061033Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-07T17:31:18.061088Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.061248Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-07T17:31:18.061278Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.072596Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-07T17:31:18.072738Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-07T17:31:18.072808Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.072951Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:48:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-07T17:31:18.073015Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:49:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-07T17:31:18.073079Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-07T17:31:18.073137Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.073207Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-07T17:31:18.073246Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:18.073813Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-07T17:31:18.073914Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [4:37:2066] 2026-01-07T17:31:18.073981Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][4:36:2066][path] Update to strong state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> 
DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:25:50.287501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:25:50.434316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:25:50.469129Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:25:50.469577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:25:50.469630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:25:51.003238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:25:51.003364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:25:51.382329Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767806740755148 != 1767806740755152 2026-01-07T17:25:51.396347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:25:51.445664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:25:51.559074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:25:51.985117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:52.001623Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:25:52.444812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2748], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:52.445034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:52.445495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:883:2753], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:52.445572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:25:52.458471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:25:52.503093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:25:52.504564Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:25:52.504884Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:912:2779] 2026-01-07T17:25:52.505146Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:25:52.563734Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:25:52.564632Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:25:52.564753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:25:52.566329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:25:52.566419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:25:52.566488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:25:52.566951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:25:52.567090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:25:52.567191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2779] in generation 1 2026-01-07T17:25:52.567646Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:25:52.609296Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:25:52.609528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:25:52.609662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:930:2789] 2026-01-07T17:25:52.609704Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:25:52.609741Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:25:52.609773Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:25:52.610024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:912:2779], Recipient [1:912:2779]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:25:52.610075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:25:52.610475Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:25:52.610589Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:25:52.610651Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:25:52.610694Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:25:52.610746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:25:52.610795Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:25:52.610837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:25:52.610869Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:25:52.610906Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:25:52.611009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:915:2780], Recipient [1:912:2779]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:25:52.611050Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:25:52.611089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:910:2777], serverId# [1:915:2780], sessionId# [0:0:0] 2026-01-07T17:25:52.611181Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:915:2780] 2026-01-07T17:25:52.611241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:25:52.611333Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:25:52.611580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:25:52.611645Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:25:52.611742Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:25:52.611782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:25:52.611817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:25:52.611847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2026-01-07T17:25:52.611880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:25:52.612213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:25:52.612261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:25:52.612297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:25:52.612344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:25:52.612401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:25:52.612428 ... 86224037888 read iterator# {[19:1126:2934], 0} exhausted 2026-01-07T17:31:15.890797Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:31:15.890831Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:31:15.890862Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:31:15.890888Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:31:15.890946Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:31:15.890973Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:31:15.891021Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-07T17:31:15.891107Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:31:15.891307Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:31:16.190329Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [19:1136:2944], Recipient [19:919:2782]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:31:16.190465Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:31:16.190557Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [19:1135:2943], serverId# [19:1136:2944], sessionId# [0:0:0] 2026-01-07T17:31:16.195079Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [19:1134:2942], Recipient [19:919:2782]: NKikimrDataEvents.TEvWrite TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } 
LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-07T17:31:16.195177Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-07T17:31:16.195464Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [19:919:2782], Recipient [19:919:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:31:16.195515Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:31:16.195648Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-07T17:31:16.195879Z node 19 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-07T17:31:16.196042Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:31:16.196192Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:31:16.196333Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckWrite 2026-01-07T17:31:16.196565Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:31:16.196654Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckWrite 2026-01-07T17:31:16.196727Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:31:16.197539Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:31:16.197648Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:31:16.197744Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:31:16.197815Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:31:16.197855Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:31:16.197891Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:31:16.197932Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:31:16.197958Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 
72075186224037888 is Executed 2026-01-07T17:31:16.197982Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:31:16.198006Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T17:31:16.198031Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:31:16.198102Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-07T17:31:16.198207Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:31:16.198375Z node 19 :TX_DATASHARD TRACE: datashard_kqp.cpp:834: KqpCommitLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-07T17:31:16.198456Z node 19 :TX_DATASHARD TRACE: datashard_user_db.cpp:517: Committing changes lockId# 281474976715661 in localTid# 1001 shard# 72075186224037888 2026-01-07T17:31:16.198588Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-07T17:31:16.198665Z node 19 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715662 marked broken at v{min} 2026-01-07T17:31:16.198899Z node 19 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:31:16.198991Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:31:16.199037Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:31:16.199094Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:31:16.199170Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:31:16.199274Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:31:16.199324Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:31:16.199382Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:31:16.199441Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:31:16.199495Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:31:16.199522Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:31:16.199559Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:31:16.200434Z node 19 :TX_DATASHARD TRACE: 
datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:31:16.200521Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:31:16.200608Z node 19 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 7 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:31:16.200729Z node 19 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:31:16.201426Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553218, Sender [19:1126:2934], Recipient [19:919:2782]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 2026-01-07T17:31:16.203317Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3660: 72075186224037888 ReadAck for read iterator# {[19:1126:2934], 0}: { ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 }, read continued, bytesLeft# 4294967295, rowsLeft# 1 2026-01-07T17:31:16.203499Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553217, Sender [19:919:2782], Recipient [19:919:2782]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-07T17:31:16.203624Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037888 ReadContinue for iterator# {[19:1126:2934], 0}, firstUnprocessedQuery# 0 2026-01-07T17:31:16.203754Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037888 ReadContinue: iterator# {[19:1126:2934], 0}, FirstUnprocessedQuery# 0 2026-01-07T17:31:16.204080Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3277: 72075186224037888 read iterator# {[19:1126:2934], 0} TTxReadContinue::Execute() found broken lock# 281474976715662 2026-01-07T17:31:16.204397Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3321: 72075186224037888 read iterator# {[19:1126:2934], 0} TTxReadContinue::Execute() finished with error, aborting: ReadId: 0 SeqNo: 2 Status { Code: ABORTED Issues { message: "Read conflict with concurrent transaction" severity: 1 } } BrokenTxLocks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 18446744073709551615 SchemeShard: 72057594046644480 PathId: 38 } NodeId: 19 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> LocalPartitionReader::Simple >> LocalPartitionReader::Simple [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberTest::SyncPartial >> TSubscriberTest::NotifyDelete >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TSubscriberSyncQuorumTest::OneRingGroup >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> KqpLimits::WaitCAsStateOnAbort >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> TMLPWriterTests::WriteOneMessage [GOOD] >> TSubscriberSyncQuorumTest::OneRingGroup [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch [GOOD] >> TSubscriberTest::SyncPartial [GOOD] >> TMLPWriterTests::WriteTwoMessage_OnePartition >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest >> TSubscriberTest::SyncWithOutdatedReplica >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> TSubscriberTest::InvalidNotification >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |83.3%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.3%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |83.4%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TTicketParserTest::AuthenticationRetryError >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 [GOOD] >> KqpExplain::Explain >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2026-01-07T17:31:22.214602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.216864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-07T17:31:22.216997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2026-01-07T17:31:22.217036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2026-01-07T17:31:22.217114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2067] 2026-01-07T17:31:22.217171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:39:2067] 2026-01-07T17:31:22.217218Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:37:2067][path] Set up state: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.217296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:40:2067] 2026-01-07T17:31:22.217351Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.217642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2026-01-07T17:31:22.217709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2026-01-07T17:31:22.217763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2067] 2026-01-07T17:31:22.217835Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:37:2067][path] Path was updated to new version: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.217930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:39:2067] 2026-01-07T17:31:22.217977Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.697159Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.697869Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-07T17:31:22.697934Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-07T17:31:22.697977Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-07T17:31:22.698038Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2066] 2026-01-07T17:31:22.698095Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:38:2066] 2026-01-07T17:31:22.698156Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:36:2066][path] Set up state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.698214Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:39:2066] 2026-01-07T17:31:22.698270Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:36:2066][path] Ignore empty state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.698392Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# 
[3:35:2065] 2026-01-07T17:31:22.698431Z node 3 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:818: [main][3:36:2066][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization |83.4%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |83.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-07T17:31:22.370308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.372409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-07T17:31:22.372510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-07T17:31:22.372572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-07T17:31:22.372832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-07T17:31:22.372991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-07T17:31:22.373090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-07T17:31:22.373151Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.373213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-07T17:31:22.373249Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.373369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-07T17:31:22.373444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-07T17:31:22.373487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-07T17:31:22.373615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-07T17:31:22.373659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-07T17:31:22.373794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-07T17:31:22.373848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:22.373896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-07T17:31:22.373932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:31:22.373996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-07T17:31:22.374021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-07T17:31:22.374063Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-07T17:31:22.374118Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:24339059:0] whose ring group state is: 0 2026-01-07T17:31:22.374239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-07T17:31:22.374374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-07T17:31:22.374431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-07T17:31:22.374485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-01-07T17:31:22.374524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:22.374642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-01-07T17:31:22.374719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2026-01-07T17:31:22.374756Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:31:22.374788Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:19:2066][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-07T17:31:22.374846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-07T17:31:22.374891Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# 
[1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.374936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12346 2026-01-07T17:31:22.374960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:21:2066], cookie# 12346, current cookie# 0 ... waiting for initial path lookups 2026-01-07T17:31:22.652028Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[2:3298559222387:0], [2:4398070850163:0], [2:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-07T17:31:22.653323Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-07T17:31:22.653423Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-07T17:31:22.653477Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] 2026-01-07T17:31:22.653515Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:13:2060] 2026-01-07T17:31:22.653551Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:14:2061] 2026-01-07T17:31:22.653587Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr:: ... 
p# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:31:22.656501Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:22.656526Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [2:32:2075], cookie# 12346 2026-01-07T17:31:22.656544Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:31:22.656562Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:31:22.656610Z node 2 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][2:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-07T17:31:22.656670Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:32:2075] 2026-01-07T17:31:22.656731Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.656793Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:33:2075], cookie# 12346 2026-01-07T17:31:22.656817Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:28:2075][TestPath] Sync cookie mismatch: sender# [2:33:2075], cookie# 12346, current cookie# 0 ... waiting for initial path lookups 2026-01-07T17:31:22.952606Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-07T17:31:22.953387Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-07T17:31:22.953467Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-07T17:31:22.953516Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-07T17:31:22.953856Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-07T17:31:22.954014Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-07T17:31:22.954110Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-07T17:31:22.954185Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.954280Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-07T17:31:22.954330Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.954433Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-07T17:31:22.954512Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-07T17:31:22.954561Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-07T17:31:22.954675Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-07T17:31:22.954741Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-07T17:31:22.954906Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-07T17:31:22.954970Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:22.955013Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-01-07T17:31:22.955056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:31:22.955121Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-07T17:31:22.955157Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-07T17:31:22.955215Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-07T17:31:22.955279Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-07T17:31:22.955426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-07T17:31:22.955574Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-07T17:31:22.955842Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-07T17:31:22.955915Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-07T17:31:22.955983Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:22.956117Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-07T17:31:22.956205Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-01-07T17:31:22.956244Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:31:22.956284Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-07T17:31:22.956356Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-07T17:31:22.956415Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.956475Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-07T17:31:22.956509Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 |83.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |83.4%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2026-01-07T17:31:21.726812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:21.729449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-07T17:31:21.729596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-07T17:31:21.729636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-07T17:31:21.729697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-07T17:31:21.729791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-07T17:31:21.729839Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.729900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-07T17:31:21.729955Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.730185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 1 2026-01-07T17:31:21.730332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2026-01-07T17:31:21.730374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2026-01-07T17:31:21.730447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 1 2026-01-07T17:31:21.730606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:6:2053], cookie# 1 2026-01-07T17:31:21.730648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 1 2026-01-07T17:31:21.730702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 1 2026-01-07T17:31:21.730735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:21.730780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-07T17:31:21.730829Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.730877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:38:2066], cookie# 1 2026-01-07T17:31:21.730896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-07T17:31:21.730914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 1 2026-01-07T17:31:21.730941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:36:2066][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:31:21.731045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 2 
2026-01-07T17:31:21.731111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 2 2026-01-07T17:31:21.731131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:21.731151Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 2 2026-01-07T17:31:21.731177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 2 2026-01-07T17:31:21.731287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 2 2026-01-07T17:31:21.731380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 2 2026-01-07T17:31:21.731403Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:31:21.731437Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 2 2026-01-07T17:31:21.731515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-07T17:31:21.731563Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.731614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 2 2026-01-07T17:31:21.731638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 2, current cookie# 0 2026-01-07T17:31:21.731714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 3 2026-01-07T17:31:21.731824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 3 2026-01-07T17:31:21.731852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:21.731901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# 
[1:38:2066], cookie# 3 2026-01-07T17:31:21.731946Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:31:21.731974Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 3 2026-01-07T17:31:21.732016Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 3 2026-01-07T17:31:21.732142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:39:2066], cookie# 3 2026-01-07T17:31:21.732166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 3, current cookie# 0 2026-01-07T17:31:21.732221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-07T17:31:21.732268Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.210027Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.210732Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:3:2050] 2026-01-07T17:31:22.210815Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:6:2053] 2026-01-07T17:31:22.210857Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:9:2056] 2026-01-07T17:31:22.210938Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:38:2067] 2026-01-07T17:31:22.211006Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:39:2067] 2026-01-07T17:31:22.211063Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.211183Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:40:2067] 
2026-01-07T17:31:22.211248Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.211371Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-07T17:31:22.211494Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-07T17:31:22.211566Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-07T17:31:22.211623Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-07T17:31:22.211705Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-07T17:31:22.211757Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-07T17:31:22.211792Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-07T17:31:22.211854Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-07T17:31:22.211909Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:22.211952Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-07T17:31:22.211992Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:31:22.212070Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-07T17:31:22.212108Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] Test command err: ... waiting for initial path lookups 2026-01-07T17:31:21.794927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:21.796513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:23:2066] 2026-01-07T17:31:21.796571Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:4:2051] Upsert description: path# TestPath 2026-01-07T17:31:21.796684Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:4:2051] Subscribe: subscriber# [1:23:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.796825Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:24:2066] 2026-01-07T17:31:21.796842Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:5:2052] Upsert description: path# TestPath 2026-01-07T17:31:21.796885Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:5:2052] Subscribe: subscriber# [1:24:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.797014Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:25:2066] 2026-01-07T17:31:21.797053Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# TestPath 2026-01-07T17:31:21.797090Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:25:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.797149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-07T17:31:21.797206Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:23:2066] 2026-01-07T17:31:21.797268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-07T17:31:21.797316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:24:2066] 2026-01-07T17:31:21.797354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-07T17:31:21.797397Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:25:2066] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-07T17:31:21.797691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-07T17:31:21.797832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-07T17:31:21.797918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-07T17:31:21.797982Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.798043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-07T17:31:21.798083Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.798210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-07T17:31:21.798261Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12345 2026-01-07T17:31:21.798348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-07T17:31:21.798394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12345 2026-01-07T17:31:21.798434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-07T17:31:21.798509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-07T17:31:21.798552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-07T17:31:21.798693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-07T17:31:21.798752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:21.798802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-07T17:31:21.798839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:31:21.798883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-07T17:31:21.798905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-07T17:31:21.798944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-07T17:31:21.798982Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Updating cluster state generation on replica: [1:24339059:0] 2026-01-07T17:31:21.799101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-07T17:31:21.799205Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:1240: [1:4:2051] Handle {EvUpdateGroupConfig GroupConfig: empty BoardConfig: empty SchemeBoardConfig: {RingGroups# [0:{NToSelect# 3 Rings# [0:{[[1:24339059:0]]} 1:{[[1:1099535966835:0]]} 2:{[[1:2199047594611:0]]}}] StateStorageVersion# 0 CompatibleVersions# [] ClusterStateGeneration# 1 ClusterStateGuid# 0}} 2026-01-07T17:31:21.799312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-07T17:31:21.799351Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12346 2026-01-07T17:31:21.799394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-07T17:31:21.799415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12346 2026-01-07T17:31:21.799445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } 
}: sender# [1:22:2066], cookie# 12346 2026-01-07T17:31:21.799479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:31:21.799553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:4:2051], cookie# 12346 2026-01-07T17:31:21.799582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-01-07T17:31:21.799640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:20:2066], cookie# 12346 2026-01-07T17:31:21.799706Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:952: [main][1:19:2066][TestPath] Cluster State mismatch in sync version response: sender# [1:20:2066], cookie# 12346, subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-07T17:31:21.799746Z node 1 :SCHEME_BOARD_S ... [TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.340893Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-07T17:31:22.341035Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-07T17:31:22.341089Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:31:22.341306Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:18:2065], cookie# 12345 ... 
waiting for some sync responses 2026-01-07T17:31:22.341384Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:20:2066] 2026-01-07T17:31:22.341454Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:21:2066] 2026-01-07T17:31:22.341511Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.341557Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:22:2066] 2026-01-07T17:31:22.341619Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.341722Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:20:2066], cookie# 12345 2026-01-07T17:31:22.341794Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:21:2066], cookie# 12345 2026-01-07T17:31:22.341837Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:22:2066], cookie# 12345 2026-01-07T17:31:22.341909Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-07T17:31:22.341985Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-07T17:31:22.342027Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... waiting for some sync responses (done) ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:31:22.342277Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:20:2066], cookie# 12345 2026-01-07T17:31:22.342325Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:22.342461Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.342517Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:1041: [main][3:19:2066][TestPath] Delay current sync request: 12345 ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:31:22.342984Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:21:2066], cookie# 12345 2026-01-07T17:31:22.343022Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:21:2066], cookie# 12345, current cookie# 0 2026-01-07T17:31:22.343065Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:22:2066], cookie# 12345 2026-01-07T17:31:22.343086Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:22:2066], cookie# 12345, current cookie# 0 2026-01-07T17:31:22.343534Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-07T17:31:22.343595Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-07T17:31:22.343635Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] 2026-01-07T17:31:22.343693Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:27:2066] 2026-01-07T17:31:22.343757Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:28:2066] 2026-01-07T17:31:22.343825Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.343878Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2066] 2026-01-07T17:31:22.343916Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:22.343996Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:27:2066], cookie# 12345 2026-01-07T17:31:22.344093Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:28:2066], cookie# 12345 2026-01-07T17:31:22.344146Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2066], cookie# 12345 2026-01-07T17:31:22.344203Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-07T17:31:22.344252Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-07T17:31:22.344294Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 2026-01-07T17:31:22.344377Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:27:2066], cookie# 12345 2026-01-07T17:31:22.344414Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:31:22.344494Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:28:2066], cookie# 12345 2026-01-07T17:31:22.344538Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:31:22.344590Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2066], cookie# 12345 2026-01-07T17:31:22.344613Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:29:2066], cookie# 12345, current cookie# 0 |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |83.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> 
KqpParams::RowsList >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> KqpQuery::PreparedQueryInvalidate >> KqpQuery::SelectWhereInSubquery |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> KqpExplain::UpdateSecondaryConditional-UseSink >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpStats::SysViewClientLost |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> KqpExplain::UpdateConditional-UseSink >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> KqpTypes::QuerySpecialTypes >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> KqpQuery::UdfTerminate >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink >> KqpQuery::QueryTimeout >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify 
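The subscriber test output above repeatedly logs per-ring-group counters of the form "Sync is in progress / Sync is done in the ring group: cookie# ..., ring group# 0, size# 3, half# 1, successes# N, failures# M, partial# P". Below is a minimal C++ sketch of the decision rule those counters suggest; TRingGroupSync, Half() and ESyncState are hypothetical names, and the rule itself (done once successes exceed half, partial once a success majority is no longer reachable) is an assumption inferred from the log lines, not the actual subscriber.cpp implementation.

    // Hypothetical sketch only: names and rule are inferred from the
    // "Sync is in progress / Sync is done in the ring group" counters above,
    // not taken from the real subscriber.cpp code.
    #include <cstddef>
    #include <iostream>

    enum class ESyncState { InProgress, Done, DonePartial };

    struct TRingGroupSync {
        std::size_t Size = 0;       // replicas in the ring group (size# in the log)
        std::size_t Successes = 0;  // complete sync responses (Partial: 0)
        std::size_t Failures = 0;   // partial/failed sync responses (Partial: 1)

        std::size_t Half() const { return Size / 2; }  // half# in the log

        ESyncState State() const {
            if (Successes > Half()) {
                return ESyncState::Done;         // a majority answered successfully
            }
            if (Failures + Half() >= Size) {
                return ESyncState::DonePartial;  // a success majority is no longer reachable
            }
            return ESyncState::InProgress;
        }
    };

    int main() {
        // Mirrors the counters from the log: size# 3, half# 1.
        TRingGroupSync group{3, 1, 0};                          // successes# 1, failures# 0
        std::cout << static_cast<int>(group.State()) << "\n";   // still in progress

        group.Successes = 2;                                    // successes# 2, failures# 0
        std::cout << static_cast<int>(group.State()) << "\n";   // done, partial# 0

        TRingGroupSync failing{3, 0, 2};                        // successes# 0, failures# 2
        std::cout << static_cast<int>(failing.State()) << "\n"; // done, partial# 1
        return 0;
    }

Under this reading, a configuration with more than one ring group (as in the two-group EvResolveReplicasList entries above) would require a successful quorum in every group; a group that finishes with partial# 1 is what the log then reports as "Sync is incomplete in one of the ring groups".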
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2026-01-07T17:30:51.971798Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673471669398970:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:51.983434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:52.321219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:52.371406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:52.371470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:52.421135Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:52.432434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673471669398865:2081] 1767807051967144 != 1767807051967147 2026-01-07T17:30:52.439225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:52.577831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:52.577848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:52.577854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:52.577933Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:52.590601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:52.836363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:52.846220Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:30:52.846283Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c88db6003d0] Connect to grpc://localhost:15940 2026-01-07T17:30:52.850301Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db6003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:30:52.865130Z node 1 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:111: [7c88db6003d0] Status 14 Service Unavailable 2026-01-07T17:30:52.865943Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:30:52.865991Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:30:52.866088Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:30:52.866363Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db6003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:30:52.875776Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c88db6003d0] Status 14 Service Unavailable 2026-01-07T17:30:52.877821Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:30:52.877857Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:30:52.983429Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:53.993062Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:30:53.993189Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:30:53.993477Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db6003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:30:54.000913Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c88db6003d0] Status 14 Service Unavailable 2026-01-07T17:30:54.001331Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:30:54.001359Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:30:55.002657Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:30:55.002768Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:30:55.003047Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db6003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } 
permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:30:55.006832Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c88db6003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:30:55.007309Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-07T17:30:56.969805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673471669398970:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:56.969878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:05.581455Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673531547138184:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:05.581568Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:05.620819Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:05.663496Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673531547138133:2081] 1767807065579104 != 1767807065579107 2026-01-07T17:31:05.689447Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:05.724921Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:05.725022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:05.727272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:05.787450Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:05.789553Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:05.789578Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:05.789586Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:05.789648Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:06.052265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:06.057897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:06.062696Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:06.062753Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c88db688850] Connect to grpc://localhost:62431 2026-01-07T17:31:06.063802Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db688850] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:06.073797Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c88db688850] Status 14 Service Unavailable 2026-01-07T17:31:06.074745Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:06.074783Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:06.074849Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:06.0751 ... lkAuthorization( something.read something.write) 2026-01-07T17:31:15.703691Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db708450] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:15.708381Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c88db708450] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-07T17:31:15.708559Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-07T17:31:15.708635Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2026-01-07T17:31:15.742772Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:19.547450Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673592908031995:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:19.576993Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:19.588890Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:19.758272Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:19.760063Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:19.765027Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673592908031862:2081] 1767807079483926 != 1767807079483929 2026-01-07T17:31:19.796885Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:19.797384Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:19.810142Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:19.940769Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:19.940794Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:19.940805Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:19.940905Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:20.043750Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:20.237247Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:20.243807Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:20.247515Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-07T17:31:20.247590Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c88db60b2d0] Connect to grpc://localhost:28783 2026-01-07T17:31:20.248860Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db60b2d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:20.277070Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c88db60b2d0] Status 14 Service Unavailable 2026-01-07T17:31:20.279013Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) 
permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:20.279053Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:20.279087Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:20.279188Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-07T17:31:20.279524Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db60b2d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:20.282590Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c88db60b2d0] Status 1 CANCELLED 2026-01-07T17:31:20.285004Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-07T17:31:20.285036Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-01-07T17:31:20.285072Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-07T17:31:24.106806Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592673615157497560:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:24.106859Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:24.154759Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:24.228258Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:24.233200Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592673615157497508:2081] 1767807084096770 != 1767807084096773 2026-01-07T17:31:24.270403Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:24.270512Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:24.293406Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:24.387801Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:24.387829Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:31:24.387837Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:24.387923Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:24.405087Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:24.688318Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:24.709254Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:24.709317Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c88db69f7d0] Connect to grpc://localhost:2602 2026-01-07T17:31:24.710375Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db69f7d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:24.730506Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c88db69f7d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:24.733186Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:24.733887Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-07T17:31:24.734153Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c88db69f7d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:24.736346Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c88db69f7d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:24.736590Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |83.5%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |83.5%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> KqpExplain::Explain [GOOD] >> KqpExplain::ExplainDataQuery >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert [GOOD] >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2026-01-07T17:30:53.330555Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673482536228894:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:53.330615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:53.460438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:53.722225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:53.764986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:53.765133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:53.810167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:53.876150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:53.878488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673482536228871:2081] 1767807053329211 != 1767807053329214 2026-01-07T17:30:53.906884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:53.958785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:53.958808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:53.958819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:53.958915Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:54.184527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:54.192654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:30:54.273147Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 2026-01-07T17:30:54.282902Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:30:54.282958Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:30:54.283630Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****4QXA (55786BA6) () has now retryable error message 'Security state is empty' 2026-01-07T17:30:54.283857Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:30:54.283902Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:30:54.284112Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****4QXA (55786BA6) () has now retryable error message 'Security state is empty' 2026-01-07T17:30:54.284130Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-07T17:30:54.284142Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-07T17:30:54.284167Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket eyJh****4QXA (55786BA6): Security state is empty 2026-01-07T17:30:54.421323Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:55.408451Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****4QXA (55786BA6) 2026-01-07T17:30:55.409333Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:30:55.409363Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:30:55.409572Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****4QXA (55786BA6) () has now retryable error message 'Security state is empty' 2026-01-07T17:30:55.409585Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-07T17:30:57.285097Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:30:58.331005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673482536228894:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:58.331082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:58.417719Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****4QXA (55786BA6) 2026-01-07T17:30:58.417930Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:30:58.417945Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:30:58.418841Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****4QXA (55786BA6) () has now valid token of user1 2026-01-07T17:30:58.418858Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-07T17:31:01.420431Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****4QXA (55786BA6) 2026-01-07T17:31:01.420917Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****4QXA (55786BA6) () has now valid token of user1 2026-01-07T17:31:05.002960Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673527790658440:2216];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:05.003154Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:05.030892Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:05.141058Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673527790658261:2081] 1767807064976964 != 1767807064976967 2026-01-07T17:31:05.155536Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:05.163525Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:05.163611Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:05.173288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:05.245497Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:05.245520Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:05.245527Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:05.245605Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:05.255390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:05.448439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:05.454622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:05.456335Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:05.456398Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5af09972d0] Connect to grpc://localhost:22201 2026-01-07T17:31:05.459163Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af09972d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-07T17:31:05.478237Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5af09972d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-07T17:31:05.478822Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:08.722793Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673544474629575:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:08.722911Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:08.802811Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:08.836270Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:08.840031Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673544474629516:2081] 1767807068710069 != 1767807068710072 2026-01-07T17:31:08.847170Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:08.847270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 ... 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:09.025499Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:09.025532Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:09.025550Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:09.025661Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:09.380792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:09.389706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:09.391412Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:09.391451Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5af0a105d0] Connect to grpc://localhost:30144 2026-01-07T17:31:09.392327Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af0a105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-07T17:31:09.401826Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5af0a105d0] Status 14 Service Unavailable 2026-01-07T17:31:09.403301Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:09.403340Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:09.403523Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af0a105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-07T17:31:09.408869Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5af0a105d0] Status 14 Service Unavailable 2026-01-07T17:31:09.409009Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:09.743691Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.744102Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-07T17:31:10.744149Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:10.744542Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af0a105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-07T17:31:10.754571Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5af0a105d0] Status 14 Service Unavailable 
2026-01-07T17:31:10.757164Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:11.747087Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-07T17:31:11.747116Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:11.747487Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af0a105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-07T17:31:11.752947Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5af0a105d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-07T17:31:11.754268Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:13.725898Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673544474629575:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:13.725997Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:22.170603Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673604714932865:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:22.180696Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:22.256250Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:22.263785Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:22.300034Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:22.300148Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:22.302199Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:22.331226Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:22.331245Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:22.331251Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:22.331327Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:22.492446Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:22.578090Z 
node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:22.585525Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:22.587941Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:22.588009Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5af093a6d0] Connect to grpc://localhost:1542 2026-01-07T17:31:22.589154Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af093a6d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-07T17:31:22.603856Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5af093a6d0] Status 14 Service Unavailable NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 2026-01-07T17:31:22.604399Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' account { user_account { id: "user1" } } 02026-01-07T17:31:22.604430Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:22.604581Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5af093a6d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } : "" 2026-01-07T17:31:22.607361Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5af093a6d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-07T17:31:22.608135Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:23.189389Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:26.878981Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:26.884277Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:27.023577Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:27.058426Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:27.058537Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:27.072596Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:27.086583Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:27.349147Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:27.349184Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:27.349197Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:27.349294Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:27.666588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:27.685459Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:27.690575Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:979: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> KqpParams::RowsList [GOOD] >> KqpParams::ParameterTypes >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> TTicketParserTest::AuthorizationModify [GOOD] >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> TTicketParserTest::AuthenticationRetryErrorImmediately |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2026-01-07T17:31:06.772799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673535868789560:2203];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:06.773301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:07.151182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:07.197899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:07.198042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:07.326156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:07.335706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:07.338995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:07.477668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:07.477689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:07.477706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:07.477820Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:07.709180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:07.719500Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:07.719590Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb683af15d0] Connect to grpc://localhost:28941 2026-01-07T17:31:07.723248Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683af15d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-07T17:31:07.727226Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:07.745290Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683af15d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:07.746577Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-07T17:31:07.748682Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb683af1cd0] Connect to grpc://localhost:13808 2026-01-07T17:31:07.749605Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683af1cd0] Request GetUserAccountRequest { user_account_id: "user1" } 
2026-01-07T17:31:07.762298Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683af1cd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-07T17:31:07.763195Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-07T17:31:10.879672Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673552425912009:2234];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.921009Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:10.980999Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.199669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:11.199815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:11.200112Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:11.203532Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673552425911793:2081] 1767807070815574 != 1767807070815577 2026-01-07T17:31:11.240990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.263641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:11.485475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:11.485496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:11.485502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:11.485567Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:11.651488Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:11.809818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:11.817837Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:11.817879Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb683ba21d0] Connect to grpc://localhost:4304 2026-01-07T17:31:11.818598Z node 2 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7cb683ba21d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-07T17:31:11.844061Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb683ba21d0] Status 14 Service Unavailable 2026-01-07T17:31:11.845171Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:11.845214Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:11.845355Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba21d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-07T17:31:11.847848Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb683ba21d0] Status 1 CANCELLED 2026-01-07T17:31:11.849104Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-07T17:31:11.922288Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:15.789979Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673574840843765:2124];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:15.790539Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:15.889589Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:15.996970Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:16.017104Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673574840843668:2081] 1767807075771955 != 1767807075771958 2026-01-07T17:31:16.022209Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:16.022289Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:16.064750Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:16.069027Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:16.229765Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:16.229795Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:16.229801Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:16.229880Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:16.487560Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:16.507371Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (8E120919): Token is not supported 2026-01-07T17:31:20.327797Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673597822618949:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:20.328395Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:20.384790Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:20.471791Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:20.477253Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch ... ] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:25.524040Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb683ba0cd0] Status 16 Access Denied 2026-01-07T17:31:25.524308Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:25.524326Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2026-01-07T17:31:25.524808Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:25.524947Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:25.526769Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb683ba0cd0] Status 16 Access Denied 2026-01-07T17:31:25.527012Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:25.527032Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2026-01-07T17:31:25.527522Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:25.527632Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-07T17:31:25.529042Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb683ba0cd0] Status 16 Access Denied 2026-01-07T17:31:25.529233Z node 5 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:25.529248Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-07T17:31:25.529644Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:25.529727Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:25.530909Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683ba0cd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:25.531010Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:25.531053Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:25.531447Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:25.531528Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-07T17:31:25.532670Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683ba0cd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:25.532806Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:25.532861Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:25.533367Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2026-01-07T17:31:25.533502Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2026-01-07T17:31:25.534638Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683ba0cd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:25.534786Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2026-01-07T17:31:25.534850Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:25.535335Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (6968D2E8) asking for AccessServiceAuthorization(something.write) 2026-01-07T17:31:25.535422Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683ba0cd0] Request AuthorizeRequest { iam_token: "**** (6968D2E8)" permission: "something.write" resource_path { id: "123" type: "ydb.database" } resource_path { id: "folder" type: "resource-manager.folder" } } 2026-01-07T17:31:25.536586Z node 5 :GRPC_CLIENT 
DEBUG: grpc_service_client.h:109: [7cb683ba0cd0] Response AuthorizeResponse { subject { service_account { id: "service1" } } } 2026-01-07T17:31:25.536689Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (6968D2E8) permission something.write now has a valid subject "service1@as" 2026-01-07T17:31:25.536723Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-07T17:31:25.711230Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:30.933144Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:30.943764Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:30.950044Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:30.953154Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592673640434723427:2081] 1767807090582191 != 1767807090582194 2026-01-07T17:31:30.972581Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:30.973170Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:31.031717Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:31.233308Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:31.297505Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:31.297527Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:31.297546Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:31.297636Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:31.713162Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:31.734376Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:31.741991Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:31.745052Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 
2026-01-07T17:31:31.745135Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb683b94550] Connect to grpc://localhost:12592 2026-01-07T17:31:31.745966Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683b94550] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.767915Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683b94550] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:31.768248Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:31.768338Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:31.769022Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:31.769072Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-07T17:31:31.769216Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683b94550] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.769751Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb683b94550] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.777758Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683b94550] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:31.777905Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb683b94550] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:31.778147Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:31.778188Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-07T17:31:31.778277Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> KqpExplain::UpdateSecondaryConditional-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] Test command err: 2026-01-07T17:31:06.701334Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673535727903809:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:06.704184Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:06.969899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:06.970030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:06.971650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:06.987066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:07.097550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:07.181785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:07.188785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:07.188806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:07.188825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:07.188959Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:07.403375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:07.521377Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-07T17:31:07.521410Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-07T17:31:07.522019Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****k_2g (06C9D8BB)) 2026-01-07T17:31:07.716608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:09.713402Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****k_2g (06C9D8BB) () has now permanent error message 'Login state is not available' 2026-01-07T17:31:09.713499Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root/Db1, 2026-01-07T17:31:10.684657Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673554338388403:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.697097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:10.736286Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:10.829409Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.019325Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:11.019400Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:11.044349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:11.045571Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.052975Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:11.056418Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673554338388277:2081] 1767807070650935 != 1767807070650938 2026-01-07T17:31:11.285576Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:11.285605Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:11.285615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:11.285714Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:11.591434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:11.602129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:11.643497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:11.673453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:11.868775Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:11.869008Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2026-01-07T17:31:11.869023Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 
2026-01-07T17:31:11.869146Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2026-01-07T17:31:11.869157Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2026-01-07T17:31:11.869179Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (5DAB89DE): Token is not in correct format 2026-01-07T17:31:15.779008Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673576258209570:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:15.779056Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:15.843563Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:15.956082Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:15.956188Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:15.982694Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673576258209525:2081] 1767807075778248 != 1767807075778251 2026-01-07T17:31:16.028724Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:16.039502Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:16.109566Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:16.233494Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:16.233523Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:16.233530Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:16.233629Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:16.500180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:16.513524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:16.601126Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:16.607482Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db 
/Root, DomainLoginOnly 1 2026-01-07T17:31:16.607520Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:31:16.608468Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****kZHQ (EFCC7E9F) () has now valid token of user1 2026-01-07T17:31:16.608486Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-07T17:31:16.609511Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:16.790142Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:19.796711Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****kZHQ (EFCC7E9F) 2026-01-07T17:31:19.797050Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****kZHQ (EFCC7E9F) () has now permanent error message 'User not found' 2026-01-07T17:31:20.780456Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exist ... 23Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:27.763782Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592673628239225743:2081] 1767807087537360 != 1767807087537363 2026-01-07T17:31:27.776063Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:27.778750Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:27.778859Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:27.791626Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:27.944747Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:28.038371Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:28.038393Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:28.038402Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:28.038489Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:28.337567Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:28.345997Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:28.352095Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** 
(8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:28.352155Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7a2501b4d0] Connect to grpc://localhost:2408 2026-01-07T17:31:28.356434Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a2501b4d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:28.366452Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a2501b4d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:28.366728Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-07T17:31:28.368349Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7a2501b150] Connect to grpc://localhost:18831 2026-01-07T17:31:28.369321Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a2501b150] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-07T17:31:28.377315Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a2501b150] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-07T17:31:28.377781Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-07T17:31:28.378359Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.write) 2026-01-07T17:31:28.378521Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a2501b4d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:28.380297Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a2501b4d0] Response BulkAuthorizeResponse { results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-07T17:31:28.380514Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "" 2026-01-07T17:31:28.380550Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-07T17:31:28.381267Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-07T17:31:28.381457Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a2501b4d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-07T17:31:28.382777Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a2501b4d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 
2026-01-07T17:31:28.382930Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-07T17:31:28.383112Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-07T17:31:32.689840Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673647657890018:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:32.693267Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:32.728578Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:32.854913Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:32.855034Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:32.903597Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:32.917162Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673647657889950:2081] 1767807092659543 != 1767807092659546 2026-01-07T17:31:32.948833Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:33.082643Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:33.109663Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:33.109684Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:33.109694Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:33.109782Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:33.410716Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:33.434225Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2026-01-07T17:31:33.434288Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7a24ff1bd0] Connect to grpc://localhost:6548 2026-01-07T17:31:33.435427Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a24ff1bd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: 
"resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2026-01-07T17:31:33.474934Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a24ff1bd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2026-01-07T17:31:33.475312Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2026-01-07T17:31:33.475344Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2026-01-07T17:31:33.475359Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2026-01-07T17:31:33.475376Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2026-01-07T17:31:33.475395Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-07T17:31:33.475571Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7a24ff7a50] Connect to grpc://localhost:1718 2026-01-07T17:31:33.476476Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7a24ff7a50] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-07T17:31:33.483526Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7a24ff7a50] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-07T17:31:33.485236Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TMLPWriterTests::WriteTwoMessage_OnePartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_TwoPartition |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2026-01-07T17:31:05.850608Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673533150287638:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:05.858995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:05.880580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:06.143471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:06.143569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:06.146786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:06.204764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:06.213136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:06.324690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:06.324716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:06.324723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:06.324800Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:06.480601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:06.557053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:06.567521Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:06.567599Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d6304ef0450] Connect to grpc://localhost:5682 2026-01-07T17:31:06.570193Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304ef0450] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-07T17:31:06.589103Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d6304ef0450] Status 14 Service Unavailable 2026-01-07T17:31:06.593069Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:06.593109Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket 
**** (8E120919) asking for AccessServiceAuthentication 2026-01-07T17:31:06.593261Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304ef0450] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-07T17:31:06.596777Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d6304ef0450] Status 14 Service Unavailable 2026-01-07T17:31:06.597172Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:09.906639Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673550506531678:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.916520Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:09.980961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.179664Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:10.198605Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:10.198702Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:10.199189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.229743Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:10.501594Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:10.501617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:10.501623Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:10.501700Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:10.705975Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:10.862588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:10.876114Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-07T17:31:10.876203Z node 2 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:83: [7d6304fb1a50] Connect to grpc://localhost:3229 2026-01-07T17:31:10.877653Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304fb1a50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-07T17:31:10.894773Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d6304fb1a50] Status 14 Service Unavailable NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-07T17:31:10.897231Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:10.897263Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:10.897326Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-07T17:31:10.897566Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304fb1a50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-01-07T17:31:10.904441Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d6304fb1a50] Status 14 Service Unavailable 2026-01-07T17:31:10.904747Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:10.904781Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:10.909123Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-07T17:31:11.953333Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-07T17:31:11.953442Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-07T17:31:11.954078Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304fb1a50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-01-07T17:31:11.956175Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d6304fb1a50] Status 14 Service Unavailable 2026-01-07T17:31:11.956496Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** 
(8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-07T17:31:11.956518Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:12.954839Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-07T17:31:12.954895Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-07T17:31:12.955152Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304fb1a50] Request AuthorizeRequest { checks { key: 0 value { permission { ... 2: [7d6304ff6fd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-07T17:31:29.658870Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304ff6fd0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-07T17:31:29.661113Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service1" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } 0: "OK" 2026-01-07T17:31:29.665605Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (6968D2E8) asking for AccessServiceAuthorization( something.write) 2026-01-07T17:31:29.665837Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304ff6fd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (6968D2E8)" } } } 2026-01-07T17:31:29.668153Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304ff6fd0] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } } 2026-01-07T17:31:29.668573Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-07T17:31:29.669210Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (7A38211C) asking for AccessServiceAuthorization( something.write) 2026-01-07T17:31:29.669403Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304ff6fd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (7A38211C)" } } } NebiusAccessService::Authorize request checks { key: 0 value 
{ permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service2" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } 0: "OK" 2026-01-07T17:31:29.671545Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304ff6fd0] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } } 2026-01-07T17:31:29.671814Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (7A38211C) () has now valid token of service2@as 2026-01-07T17:31:29.672447Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8853A21F) asking for AccessServiceAuthorization( something.write) 2026-01-07T17:31:29.672635Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304ff6fd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (8853A21F)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service3" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } 0: "OK" 2026-01-07T17:31:29.674858Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304ff6fd0] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } } 2026-01-07T17:31:29.675157Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8853A21F) () has now valid token of service3@as 2026-01-07T17:31:33.831619Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673653865518433:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:33.831674Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:34.065015Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:34.068939Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:34.073172Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673653865518412:2081] 1767807093829967 != 1767807093829970 2026-01-07T17:31:34.091986Z node 5 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:34.096344Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:34.139471Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:34.347439Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:34.357769Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:34.357793Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:34.357802Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:34.357907Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:34.770889Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:34.778715Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:34.784531Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-07T17:31:34.784606Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d6304f902d0] Connect to grpc://localhost:9598 2026-01-07T17:31:34.785823Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304f902d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-07T17:31:34.815671Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304f902d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-07T17:31:34.816178Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:34.820562Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-07T17:31:34.821014Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6304f902d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission 
{ name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-07T17:31:34.830787Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6304f902d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-07T17:31:34.831209Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:34.873164Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> TTicketParserTest::AuthorizationUnavailable [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2026-01-07T17:31:04.757961Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673528760854580:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:04.758013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:04.826421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:05.157238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:05.158533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:05.158648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:05.177791Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:05.178972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:05.181744Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673528760854542:2081] 1767807064756587 != 1767807064756590 2026-01-07T17:31:05.313641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:05.313671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:05.313678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:05.313756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:05.457044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:05.548388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:05.560012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:05.566900Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:05.567057Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3703600050] Connect to grpc://localhost:7134 2026-01-07T17:31:05.570546Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3703600050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:05.580886Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3703600050] Status 14 Service Unavailable 2026-01-07T17:31:05.581351Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:05.581397Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:05.581440Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:05.581742Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3703600050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 
2026-01-07T17:31:05.583491Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3703600050] Status 14 Service Unavailable 2026-01-07T17:31:05.583814Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:05.583844Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:05.798697Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:06.780110Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:31:06.780164Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:06.780502Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3703600050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:06.789763Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3703600050] Status 14 Service Unavailable 2026-01-07T17:31:06.790072Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:06.790104Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:07.780687Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:31:07.780728Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:07.781040Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3703600050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:07.787254Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3703600050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:07.787736Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:07.787913Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-07T17:31:09.762558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673528760854580:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.762660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:18.507714Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673587908776780:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:18.507768Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:18.557931Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:18.660418Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:18.695890Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:18.695975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:18.722589Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:18.805672Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:18.805696Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:18.805704Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:18.805787Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:18.823480Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:19.161306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:19.172420Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:19.172508Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d370362e2d0] Connect to grpc://localhost:63838 2026-01-07T17:31:19.173428Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d370362e2d0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:19.183877Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d370362e2d0] Status 14 Service Unavailable 2026-01-07T17:31:19.184267Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:19.184311Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:19.184334Z node 2 
:TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:19.184594Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d370362e2d0] Request Authoriz ... : failed to initialize from file: (empty maybe) 2026-01-07T17:31:31.668744Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:31.687786Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:31.975633Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:31.986151Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:31.989678Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:31.989764Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2026-01-07T17:31:31.989795Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2026-01-07T17:31:31.989829Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-07T17:31:31.989863Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2026-01-07T17:31:31.989931Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d37036a75d0] Connect to grpc://localhost:19710 2026-01-07T17:31:31.991022Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a75d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.996470Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a75d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.996873Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a75d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.997150Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a75d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:31.997416Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a75d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: 
"something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:32.009644Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37036a75d0] Status 16 Access Denied 2026-01-07T17:31:32.012768Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:32.015337Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d37036a75d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:32.015664Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-07T17:31:32.015706Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37036a75d0] Status 16 Access Denied 2026-01-07T17:31:32.015762Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:32.015962Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37036a75d0] Status 16 Access Denied 2026-01-07T17:31:32.016027Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:32.017715Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37036a75d0] Status 16 Access Denied 2026-01-07T17:31:32.017996Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2026-01-07T17:31:32.018024Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-07T17:31:32.018158Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d37036a7250] Connect to grpc://localhost:26620 2026-01-07T17:31:32.019076Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37036a7250] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-07T17:31:32.027836Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d37036a7250] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-07T17:31:32.028333Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-07T17:31:37.214931Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:37.215279Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:37.318943Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:37.321787Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592673669394641532:2081] 1767807097160121 != 1767807097160124 2026-01-07T17:31:37.343445Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:37.343533Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:31:37.361782Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:37.451500Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:37.477079Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:37.477102Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:37.477108Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:37.477188Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:37.781223Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:37.793219Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:37.797083Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-07T17:31:37.797165Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-07T17:31:37.797229Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d37035f54d0] Connect to grpc://localhost:13363 2026-01-07T17:31:37.798290Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37035f54d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:37.804179Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37035f54d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:37.815180Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d37035f54d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:37.815690Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:37.818556Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37035f54d0] Status 14 Service Unavailable 2026-01-07T17:31:37.819342Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:37.819385Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:37.819419Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 
2026-01-07T17:31:37.819491Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-07T17:31:37.819755Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37035f54d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:37.820429Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37035f54d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-07T17:31:37.824697Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d37035f54d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:37.825013Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-07T17:31:37.829058Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d37035f54d0] Status 14 Service Unavailable 2026-01-07T17:31:37.830072Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2026-01-07T17:31:37.830112Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2026-01-07T17:31:06.682485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673537979205854:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:06.683999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:07.005137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:07.008607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:07.008727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:07.065267Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:07.227963Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:07.234449Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673537979205703:2081] 1767807066663326 != 1767807066663329 2026-01-07T17:31:07.248668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:07.344879Z node 1 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-07T17:31:07.351051Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:07.351148Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-07T17:31:07.351162Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-07T17:31:07.351172Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-07T17:31:07.351181Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-07T17:31:07.351246Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde1bb00750] Connect to grpc://localhost:23036 2026-01-07T17:31:07.358277Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb00750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:07.361768Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-07T17:31:07.386814Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb00750] Status 16 Unauthenticated service 2026-01-07T17:31:07.387236Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-07T17:31:07.387283Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-01-07T17:31:07.387318Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-07T17:31:07.387389Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:07.387668Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb00750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:07.393520Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde1bb00750] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:07.396376Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 
**** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:10.612108Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.637651Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.778948Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:10.779381Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673552772562235:2081] 1767807070501475 != 1767807070501478 2026-01-07T17:31:10.813180Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:10.813247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:10.858800Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:10.904999Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:10.984853Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-07T17:31:10.985040Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:10.985060Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-07T17:31:10.985071Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-07T17:31:10.985092Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-07T17:31:10.985104Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-07T17:31:10.985130Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde1bb22950] Connect to grpc://localhost:15287 2026-01-07T17:31:10.985989Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb22950] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:10.995406Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-07T17:31:11.057360Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb22950] Status 16 Unauthenticated service 2026-01-07T17:31:11.065129Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-07T17:31:11.065159Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-01-07T17:31:11.065190Z 
node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-07T17:31:11.065233Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:11.065457Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb22950] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:11.070396Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde1bb22950] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:11.073121Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:31:11.517625Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:11.517672Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-07T17:31:12.521014Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-07T17:31:13.523895Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-07T17:31:13.523931Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:70: Refresh token for provider# token-for-access-service 2026-01-07T17:31:13.523980Z node 2 :TOKEN_MANAGER TRACE: vm_metadata_token_provider_handler.cpp:25: Handle send request to vm metaservice 2026-01-07T17:31:13.526600Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-07T17:31:13.526766Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-07T17:31:13.526806Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2026-01-07T17:31:13.526831Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-07T17:31:13.526843Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-07T17:31:13.526852Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-07T17:31:13.526858Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-07T17:31:13.527031Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb22950] Request BulkAuthorizeRequest { iam_token: "**** (9D42FAED)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-07T17:31:13.527585Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# new-****ount (82D66F55) 2026-01-07T17:31:13.530682Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb22950] Status 16 Unauthenticated service 2026-01-07T17:31:13.535817Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (9D42FAED) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-07T17:31:13.535852Z node 2 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (9D42FAED) () has now retryable error message 'Unauthenticated service' 2026-01-07T17:31:13.535892Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: new-****ount (82D66F ... r_subdomain.cpp:311) 2026-01-07T17:31:20.166619Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2026-01-07T17:31:20.166675Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde1bb90650] Connect to grpc://localhost:10511 2026-01-07T17:31:20.167444Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb90650] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2026-01-07T17:31:20.181300Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde1bb90650] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2026-01-07T17:31:20.181606Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2026-01-07T17:31:23.529996Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673607397949675:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:23.530048Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:23.638515Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:23.660821Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:23.671664Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673607397949641:2081] 1767807083526354 != 1767807083526357 2026-01-07T17:31:23.706520Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:23.706599Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:23.711938Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:23.835966Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:23.847639Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:23.847658Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:23.847677Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:23.847749Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:24.065353Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:24.073806Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:24.081089Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:24.081158Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde1bb6cbd0] Connect to grpc://localhost:1361 2026-01-07T17:31:24.082429Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb6cbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:24.092045Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb6cbd0] Status 14 Service Unavailable 2026-01-07T17:31:24.092607Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:24.092647Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:24.092853Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb6cbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:24.098436Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb6cbd0] Status 14 Service Unavailable 2026-01-07T17:31:24.098858Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:24.539169Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:25.585132Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:31:25.585186Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:25.585372Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb6cbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:25.590180Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb6cbd0] Status 14 Service Unavailable 2026-01-07T17:31:25.590290Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:26.586732Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-07T17:31:26.586767Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:26.586966Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb6cbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:26.590205Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde1bb6cbd0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:26.590688Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE 
(B3EDC139) () has now valid token of user1@as 2026-01-07T17:31:28.533384Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673607397949675:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:28.533450Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:37.205921Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592673668608465007:2182];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:37.206054Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:37.291777Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:37.437164Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:37.437244Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:37.443547Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:37.457629Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:37.469086Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592673668608464863:2081] 1767807097178849 != 1767807097178852 2026-01-07T17:31:37.494018Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:37.715745Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:37.737781Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:37.737806Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:37.737814Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:37.737897Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:38.044540Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:38.052283Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:38.054145Z 
node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:38.054207Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde1bb8edd0] Connect to grpc://localhost:8196 2026-01-07T17:31:38.055104Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb8edd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:38.073618Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde1bb8edd0] Status 14 Service Unavailable 2026-01-07T17:31:38.077103Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-07T17:31:38.077150Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-07T17:31:38.077399Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde1bb8edd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-07T17:31:38.079499Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde1bb8edd0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:31:38.081995Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-07T17:31:38.237710Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq [GOOD] >> IndexBuildTest::RejectsCancel |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> KqpInplaceUpdate::SingleRowIf-UseSink |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> KqpExplain::UpdateConditionalKey-UseSink |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |83.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> 
KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |83.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> KqpEffects::InsertRevert_Literal_Success >> TTicketParserTest::LoginEmptyTicketBad [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |83.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpOverload::OltpOverloaded-Distributed >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2026-01-07T17:31:06.690016Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673535177019195:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:06.690087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:06.941271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:07.018764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:07.018841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:07.092315Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673535177019167:2081] 1767807066687341 != 1767807066687344 2026-01-07T17:31:07.105686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:07.107023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:07.172156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:07.220845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:07.220874Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:07.220960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:07.221094Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:07.484808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:07.493077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:07.526561Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:07.540519Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:31:07.540550Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:31:07.541636Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****d-YQ (90792FD2) () has now valid token of user1 2026-01-07T17:31:07.541652Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-07T17:31:10.978904Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.986294Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.154884Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:11.160790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:11.160880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:11.224294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:11.228962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.387203Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:11.387221Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:11.387228Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:11.387341Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:11.756796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:11.763553Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:11.891860Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:11.902098Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:31:11.902137Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:31:11.902900Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****I9Nw (26288DA1) () has now valid token of user1 2026-01-07T17:31:11.902916Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-07T17:31:11.957119Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:16.099542Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673580201917471:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:16.099605Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:16.118048Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:16.263912Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673580201917443:2081] 1767807076098215 != 1767807076098218 2026-01-07T17:31:16.311011Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:16.316747Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:16.316834Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:16.319140Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:16.367456Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:16.473568Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:16.473588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:16.473597Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:16.473678Z node 3 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:16.782354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:16.792398Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:16.899798Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-07T17:31:16.899837Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-07T17:31:16.899902Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****-jhA (ED06C971)) 2026-01-07T17:31:17.109112Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:17.900272Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db1 keys 1 2026-01-07T17:31:17.900296Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db1 2026-01-07T17:31:17.900565Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-07T17:31:17.900594Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-07T17:31:17.901517Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****-jhA (ED06C971) () has now valid token of user1 2026-01-07T17:31:17.901549Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db1, A4 success 2026-01-07T17:31:18.029340Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-07T17:31:18.029383Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-07T17:31:18.029429Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db2, login state is not available yet, deffer token (eyJh****TKpg (4BD72EB6)) 2026-01-07T17:31:19.026990Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db2 keys 1 2026-01-07T17:31:19.027012Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db2 2026-01-07T17:31:19.027217Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-07T17:31:19.027241Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-07T17:31:19.027952Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****TKpg (4BD72EB6) () has now valid token of user1 2026-01-07T17:31:19.027971Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, 
database /Root/Db2, A4 success 2026-01-07T17:31:20.414257Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:20.414410Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:20.586163Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:20.591950Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:20.610454Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:20.610549Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:20.661313Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:20.809546Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:20.809570Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:20.809581Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:20.809668Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:21.120630Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:21.140633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:21.153416Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:21.285120Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:21.289784Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:31:21.289822Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:31:21.290538Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ptsA (8DFDCA30) () has now valid token of user1 2026-01-07T17:31:21.290553Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-07T17:31:21.291037Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:21.405063Z node 4 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:25.381750Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ptsA (8DFDCA30) 2026-01-07T17:31:25.382208Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ptsA (8DFDCA30) () has now valid token of user1 2026-01-07T17:31:30.397279Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ptsA (8DFDCA30) 2026-01-07T17:31:30.397729Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ptsA (8DFDCA30) () has now valid token of user1 2026-01-07T17:31:31.293643Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:34.402331Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ptsA (8DFDCA30) 2026-01-07T17:31:34.402663Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ptsA (8DFDCA30) () has now valid token of user1 2026-01-07T17:31:35.528170Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:31:35.528206Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:38.412666Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ptsA (8DFDCA30) 2026-01-07T17:31:38.413056Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ptsA (8DFDCA30) () has now valid token of user1 2026-01-07T17:31:42.551267Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673688816193491:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:42.551312Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:42.967958Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:42.978297Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:42.984508Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673688816193322:2081] 1767807102519118 != 1767807102519121 2026-01-07T17:31:43.035174Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:43.035272Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:43.111360Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:43.313017Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:43.324493Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:43.324520Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:31:43.324533Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:43.324669Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:43.581115Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:43.719276Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:43.872843Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:31:43.894101Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:987: Ticket **** (00000000): Ticket is empty |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap |83.6%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> KqpEffects::AlterAfterUpsertTransaction+UseSink >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |83.6%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> KqpImmediateEffects::Replace >> IndexBuildTest::RejectsCancel [GOOD] >> IndexBuildTest::NullsAreUniq >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink >> VDiskBalancing::TestRandom_Block42 [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpExplain::CreateTableAs+Stats >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> Yq_1::Basic_TaggedLiteral [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2026-01-07T17:31:10.399110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673552560009561:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.399201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:10.445643Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009087s 2026-01-07T17:31:11.380498Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] E0107 17:31:11.405143689 66203 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:11.405269621 66203 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:11.425420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:11.539119Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.575920Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.601265Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.601336Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.634221Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19316 2026-01-07T17:31:11.634351Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.786980Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.787182Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.787438Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.807949Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.808058Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.831409Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.845917Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.846002Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.877979Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.944792Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.963929Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.970174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:11.970275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:11.970481Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.972143Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:11.972207Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.007245Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.067095Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.067179Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.068063Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.149383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:12.161273Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19316 } ] 2026-01-07T17:31:12.204349Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:19316: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on end ... sionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-07T17:31:53.765832Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673739789325821 RawX2: 4503616807242564 } } DstEndpoint { ActorId { RawX1: 7592673739789325822 RawX2: 4503616807242565 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673739789325822 RawX2: 4503616807242565 } } DstEndpoint { ActorId { RawX1: 7592673739789325817 RawX2: 4503616807242253 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:31:53.765851Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:31:53.765859Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:31:53.765874Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:31:53.765886Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:31:53.765894Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:31:53.767035Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Received TEvResolveKeySetResult update for table 'Root/yq/queries' 2026-01-07T17:31:53.767104Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Processing resolved ShardId# 72075186224037898, partition range: [(String : yandexcloud://some_folder_id, String : utqudo61ao5b6gp0nsbn) ; ()), i: 0, state ranges: 0, points: 1 2026-01-07T17:31:53.767122Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Add point to new shardId: 72075186224037898 2026-01-07T17:31:53.767213Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Pending shards States: TShardState{ TabletId: 72075186224037898, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudo61ao5b6gp0nsbn)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudo61ao5b6gp0nsbn)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-07T17:31:53.767228Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. effective maxinflight 1024 sorted 0 2026-01-07T17:31:53.767239Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. BEFORE: 1.0 2026-01-07T17:31:53.767278Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Send EvRead to shardId: 72075186224037898, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-07T17:31:53.767312Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. AFTER: 0.1 2026-01-07T17:31:53.767324Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-07T17:31:53.768317Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Recv TEvReadResult from ShardID=72075186224037898, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-07T17:31:53.768337Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Taken 0 locks 2026-01-07T17:31:53.768349Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:31:53.768370Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673739789325821:2884], TxId: 281474976715729, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:31:53.768387Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673739789325821:2884], TxId: 281474976715729, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:31:53.768402Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:31:53.768422Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. enter pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:31:53.768453Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. exit pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-07T17:31:53.768472Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. returned 1 rows; processed 1 rows 2026-01-07T17:31:53.768506Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. dropping batch for read #0 2026-01-07T17:31:53.768516Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. effective maxinflight 1024 sorted 0 2026-01-07T17:31:53.768528Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:31:53.768544Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715729, task: 1, CA Id [4:7592673739789325821:2884]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:31:53.768676Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673739789325821:2884], TxId: 281474976715729, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:31:53.768854Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715729;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:31:53.769051Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:31:53.769165Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673739789325822:2885], TxId: 281474976715729, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr7pgt23nbb5xk526p88q1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGVmYzU4YzQtYWJiYzRiZDUtMzMzYTMzYTktZjc5MjI3ZjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:31:53.769253Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715729;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:31:54.030419Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:27127: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:27127 2026-01-07T17:31:54.262294Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:31:54.262336Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |83.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 11428037831953143984 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-01-07T17:23:28.863697Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2026-01-07T17:23:28.977273Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] 
Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2026-01-07T17:23:30.411165Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-07T17:23:30.411385Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-07T17:23:30.411494Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7676:16] ServerId# [1:7684:1098] TabletId# 72057594037932033 PipeClientId# [5:7676:16] 2026-01-07T17:23:30.411571Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-07T17:23:30.411672Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-07T17:23:30.411759Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 
SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# 
[1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Statu ... 
36 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999561} Stop node 3 2026-01-07T17:30:58.263569Z 1 00h25m30.852048s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 4 2026-01-07T17:30:58.570465Z 1 00h25m40.872048s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Stop node 7 2026-01-07T17:31:00.825486Z 1 00h26m10.874608s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 
1 2026-01-07T17:31:01.281264Z 1 00h26m20.875120s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 1 2026-01-07T17:31:02.008033Z 1 00h26m40.885008s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Starting nodes Start compaction 1 Start checking |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 26419, MsgBus: 18947 2026-01-07T17:30:36.298955Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673405374967496:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:36.298998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:36.857454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:36.857591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:36.892543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:36.995969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:37.025264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673405374967332:2081] 1767807036223047 != 1767807036223050 2026-01-07T17:30:37.105709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:37.337153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-07T17:30:37.337283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:37.534884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:37.534927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:37.534938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:37.535048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:38.492188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:38.511205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:41.301417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673405374967496:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:41.301501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:41.842260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673426849804708:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.842443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.849259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673426849804718:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.849384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.935684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:42.259636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673431144772112:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.259750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.260289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673431144772115:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.260342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.272402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:42.361933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673431144772194:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.362035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.363132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673431144772199:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.363186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673431144772200:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.363297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:42.368006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:42.384825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673431144772203:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:30:42.548151Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673431144772256:2654] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3735, MsgBus: 8106 2026-01-07T17:30:44.248087Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673443291008650:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:44.248159Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:44.341894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:44.453483Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673443291008630:2081] 1767807044242457 != 1767807044242460 2026-01-07T17:30:44.489801Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:44.491032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:44.491103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:44.521484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:44.713091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:44.741524Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:44.741556Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:44.741564Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:44.741653Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:45.305147Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:45.697718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976 ... pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:46.015967Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:46.016831Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592673709465556269:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:46.016958Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:46.017162Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592673709465556268:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:46.024150Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:46.050907Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7592673709465556272:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:31:46.110192Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7592673709465556323:2557] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:46.178702Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7592673709465556332:2342], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-07T17:31:46.188691Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=NzFhNjhhNjctMTBiNjM0NGEtNzE0MzZjOWItOTY4NWQ4Zjg=, ActorId: [10:7592673709465556230:2331], ActorState: ExecuteState, LegacyTraceId: 01kecr773xdbqad2tc34v5d5ek, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 11779, MsgBus: 62947 2026-01-07T17:31:48.704143Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7592673714775418409:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:48.704761Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:48.772503Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:49.081138Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:49.210139Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:49.210260Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:49.221147Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7592673714775418190:2081] 1767807108628507 != 1767807108628510 2026-01-07T17:31:49.268155Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:49.274545Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:49.543938Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:49.543969Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:49.543980Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:49.544087Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:49.633889Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:49.685093Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:50.845942Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:53.687846Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592673714775418409:2256];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:31:53.687958Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:55.753758Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673744840190166:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.753911Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.754365Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673744840190178:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.754422Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673744840190179:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.754474Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.760694Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:55.786147Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592673744840190182:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:31:55.852850Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592673744840190233:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:55.898991Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7592673744840190250:2338], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-07T17:31:55.900872Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=YjY3ZmYyZDAtMTkyMDg4ZTEtNTIyODJhNDktMzFmMzZhZDY=, ActorId: [11:7592673744840190164:2328], ActorState: ExecuteState, LegacyTraceId: 01kecr7kqabz8cxk5rkwagd1fm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |83.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> IndexBuildTest::NullsAreUniq [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::ExecuteWriteQuery >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> TMLPWriterTests::WriteTwoMessage_TwoPartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_Deduplicated >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::CreateTableAs-Stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::NullsAreUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:31.987546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:31.987639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:31.987678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:31.987726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:31.987762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:31.987796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:31.987852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:31.987927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:31.988748Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:32.017672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:32.283172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:32.283245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:32.320902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:32.321286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:32.321482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:32.354439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:32.354736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:32.355477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:32.360167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:32.371120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:32.371333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:32.372715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:32.372781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:32.372900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:32.372974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:32.373021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:32.373192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:32.831152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.845499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.845669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.845734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.845830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.845888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:32.846690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-07T17:31:59.861285Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 2/2, is published: true 2026-01-07T17:31:59.861358Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1007:2947] message: TxId: 281474976725761 2026-01-07T17:31:59.861412Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-07T17:31:59.861447Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-07T17:31:59.861477Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-07T17:31:59.861542Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-07T17:31:59.861575Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:1 2026-01-07T17:31:59.861593Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:1 2026-01-07T17:31:59.861620Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 34] was 3 2026-01-07T17:31:59.866205Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-07T17:31:59.866290Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-07T17:31:59.866372Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 107, txId# 281474976725761 2026-01-07T17:31:59.866494Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1652:3523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0}, txId# 281474976725761 
2026-01-07T17:31:59.868502Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking 2026-01-07T17:31:59.868646Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1652:3523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-07T17:31:59.868726Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:31:59.871176Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done 2026-01-07T17:31:59.871329Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1652:3523], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-07T17:31:59.871402Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-07T17:31:59.871573Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:31:59.871625Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [5:1667:3538] TestWaitNotification: OK eventTxId 107 2026-01-07T17:31:59.872262Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-07T17:31:59.872515Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-07T17:31:59.873273Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:31:59.873545Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 304us result status StatusSuccess 2026-01-07T17:31:59.874251Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "test_index" LocalPathId: 33 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index1" KeyColumnNames: "index2" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> KqpEffects::AlterAfterUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction-UseSink >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> PrivateApi::Nodes [GOOD] >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::ReplaceDuplicates >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] >> ColumnBuildTest::AlterMainTable_Failed >> KqpOverload::OltpOverloaded-Distributed [GOOD] >> KqpReattach::ReattachDeliveryProblem >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12688, MsgBus: 28900 2026-01-07T17:31:47.058728Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673712729269182:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:47.058776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:47.441126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:47.452777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:47.452854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:31:47.611938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673708434301731:2081] 1767807106969788 != 1767807106969791 2026-01-07T17:31:47.615223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:47.618896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:47.657009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:47.873484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:47.873505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:47.873510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:47.873577Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:48.057445Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:48.498797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:48.515247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:48.595340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:48.778420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:48.939483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:49.015075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:50.922716Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673725614172789:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:50.922832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:50.923181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673725614172799:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:50.923216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.244240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.290989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.339385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.391894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.447100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.534033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.602507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.719743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.870529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673729909140966:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.870630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.871092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673729909140971:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.871133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673729909140972:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.871172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.875236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:51.917229Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673729909140975:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:52.022689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673734204108324:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:52.057652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673712729269182:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:52.057723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:53.804339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... teTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 3478, MsgBus: 10982 2026-01-07T17:31:55.497212Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673748172067780:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:55.497266Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:55.527969Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:55.593853Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:55.621178Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:55.621246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:55.629661Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:55.713477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:55.713522Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:55.713529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:55.713597Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:55.798097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:56.107514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:56.133628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.216582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.397291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.509415Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:56.545539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.026114Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673765351938774:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.026244Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.026947Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673765351938784:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.027029Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.109124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.152973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.194434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.229339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.264185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.310332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.361966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.443630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.554578Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673765351939655:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.554718Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.555039Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673765351939660:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.555073Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673765351939661:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.555204Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.560970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:59.573147Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673765351939664:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:59.669544Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673765351939715:3759] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:00.501017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673748172067780:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:00.501082Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:01.258745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpPg::ExplainColumnsReorder [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2026-01-07T17:31:09.193712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673547812329413:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.194034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:09.818926397 65297 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:09.819039547 65297 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:09.836858Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.836968Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.939622Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.939706Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.941225Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.941357Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.945594Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.945642Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:09.974196Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.044538Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.050150Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2101 2026-01-07T17:31:10.050250Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.050297Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.057064Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.116551Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.185996Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.209199Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.209272Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.217011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.219906Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.314334Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.314473Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.361150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.361309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.469071Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.519362Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.525039Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.537378Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.554308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.570893Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.585109Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2101: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2101 } ] 2026-01-07T17:31:10.588179Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GR ... input_channels_count=0; 2026-01-07T17:32:01.670203Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:144: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Start compute actor [7:7592673770543179734:2355], task: 1 2026-01-07T17:32:01.670219Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:155: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Set execution timeout 299.992812s 2026-01-07T17:32:01.670607Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1971: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\035\n\rRoot/yq/nodes\020\200\202\224\204\200\200\200\200\001\0306(\001\"\r\n\006tenant\020\001 \201 \"\r\n\007node_id\020\003 \002*\024\n\016active_workers\020\005 \004*\022\n\013data_center\020\013 \201 *\017\n\texpire_at\020\010 2*\017\n\010hostname\020\004 \201 *\022\n\013instance_id\020\002 \201 *\027\n\021interconnect_port\020\t \002*\026\n\020memory_allocated\020\007 \004*\022\n\014memory_limit\020\006 \004*\023\n\014node_address\020\n \201 *\r\n\007node_id\020\003 \002*\r\n\006tenant\020\001 \201 0\252\247\200\200\200\200@8\007@\000H\001R\022\t\322\347\246\014\221\230^i\0213\t\000\000\007\000\020\000X\000`\000h\004h\nh\007h\003h\002h\010h\006h\005h\th\001h\000r\022P\316\360\217\314\2713X\377\377\377\377\377\377\377\377\377\001x\000\262\001\000" } } } 2026-01-07T17:32:01.670803Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2026-01-07T17:32:01.670832Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Received channels info: 2026-01-07T17:32:01.670912Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . 
PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-01-07T17:32:01.671019Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: TxId: 281474976715691, task: 1. Add data: 100 / 100 2026-01-07T17:32:01.671074Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: TxId: 281474976715691, task: 1. Send data=100, closed=1, bufferActorId=[7:7592673770543179730:2355] 2026-01-07T17:32:01.671095Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 100 2026-01-07T17:32:01.671111Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Waiting finish of sink[0] 2026-01-07T17:32:01.671130Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2026-01-07T17:32:01.671149Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Received channels info: 2026-01-07T17:32:01.671161Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Waiting finish of sink[0] 2026-01-07T17:32:01.671197Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
CA StateFunc 271646922 2026-01-07T17:32:01.671213Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Waiting finish of sink[0] 2026-01-07T17:32:01.671280Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [7:7592673770543179730:2355], SessionActorId: [7:7592673731888471760:2355], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:54:1]). lockId=281474976715690. ActorId=[7:7592673770543179736:2355] 2026-01-07T17:32:01.671338Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Open: token=0 2026-01-07T17:32:01.671551Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Write: token=0 2026-01-07T17:32:01.671644Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Close: token=0 2026-01-07T17:32:01.671684Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [7:7592673770543179735:2355], TxId: 281474976715691, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7592673770543179730:2355] 2026-01-07T17:32:01.671699Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [7:7592673770543179735:2355], TxId: 281474976715691, task: 1. Finished 2026-01-07T17:32:01.671717Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-07T17:32:01.671737Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7592673770543179734:2355], TxId: 281474976715691, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7y7y3j841zb089h5ht3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NjMzYzBiMDItMzcwOTJiMmMtZGVhMDA4MDEtMTlkZmNlNzc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:01.671887Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715691;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:01.672185Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3511: SelfId: [7:7592673770543179730:2355], SessionActorId: [7:7592673731888471760:2355], Start immediate commit 2026-01-07T17:32:01.672200Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1081: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]SetImmediateCommit 2026-01-07T17:32:01.672214Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [7:7592673770543179730:2355], SessionActorId: [7:7592673731888471760:2355], Flush data 2026-01-07T17:32:01.672349Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Send EvWrite to ShardID=72075186224037891, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715690 DataShard: 72075186224037891 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 54, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=212 2026-01-07T17:32:01.694156Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Recv EvWriteResult from ShardID=72075186224037891, Status=STATUS_COMPLETED, TxId=7, Locks= , Cookie=1 2026-01-07T17:32:01.694195Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [7:7592673770543179736:2355], Table: `Root/yq/nodes` ([72057594046644480:54:1]), SessionActorId: [7:7592673731888471760:2355]Got completed result TxId=7, TabletId=72075186224037891, Cookie=1, Mode=3, Locks= 2026-01-07T17:32:01.694254Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [7:7592673770543179730:2355], SessionActorId: [7:7592673731888471760:2355], Committed TxId=0 |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2026-01-07T17:31:08.488604Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673543479224781:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:08.488663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:09.010463Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] E0107 17:31:09.031681784 65058 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:09.031812039 65058 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:09.281096Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.281264Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.281303Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.281403Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.281459Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.378426Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.378555Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.395663Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.395957Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.441424Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.441735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:09.449664Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.477883Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:61868 2026-01-07T17:31:09.480525Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.481219Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.494884Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.495974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:09.540570Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:09.584696Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.643262Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.666686Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.666789Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.693620Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.693710Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.698058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:09.719476Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.740351Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.751514Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.763316Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.769918Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:61868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61868 } ] 2026-01-07T17:31:09.770014Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bin ... de 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673770194525609 RawX2: 4503616807242587 } } DstEndpoint { ActorId { RawX1: 7592673770194525610 RawX2: 4503616807242588 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673770194525610 RawX2: 4503616807242588 } } DstEndpoint { ActorId { RawX1: 7592673770194525605 RawX2: 4503616807242225 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:00.916728Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7592673770194525609:2907] 2026-01-07T17:32:00.916767Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:00.916798Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:00.916841Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-07T17:32:00.917032Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673770194525609 RawX2: 4503616807242587 } } DstEndpoint { ActorId { RawX1: 7592673770194525610 RawX2: 4503616807242588 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673770194525610 RawX2: 4503616807242588 } } DstEndpoint { ActorId { RawX1: 7592673770194525605 RawX2: 4503616807242225 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:00.917064Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:00.917074Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:00.917090Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:00.917105Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:00.917112Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:00.917900Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. 
Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:00.917927Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. Taken 0 locks 2026-01-07T17:32:00.917938Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:00.917955Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673770194525609:2907], TxId: 281474976715736, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7x05ez0axg165pds380b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:32:00.917973Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673770194525609:2907], TxId: 281474976715736, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7x05ez0axg165pds380b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:00.917993Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:00.918014Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:00.918029Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:00.918046Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. returned 0 rows; processed 0 rows 2026-01-07T17:32:00.918088Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. dropping batch for read #0 2026-01-07T17:32:00.918098Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. effective maxinflight 1 sorted 1 2026-01-07T17:32:00.918112Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:00.918130Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715736, task: 1, CA Id [4:7592673770194525609:2907]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:00.918191Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673770194525609:2907], TxId: 281474976715736, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr7x05ez0axg165pds380b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:00.918395Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715736;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:00.918582Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:00.918638Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673770194525610:2908], TxId: 281474976715736, task: 2. Ctx: { TraceId : 01kecr7x05ez0axg165pds380b. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzAwM2U5NWQtZWVlNDJiNTEtYTE2NTY0YmItNDJmODk2MDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:00.918742Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715736;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpExplain::UpdateOn+UseSink [GOOD] >> KqpExplain::UpdateOn-UseSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 9284, MsgBus: 65429 |83.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 5022, MsgBus: 19833 2026-01-07T17:30:32.949878Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673391788015887:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:32.949926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:33.920798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:33.921134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:33.937157Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:33.941036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:33.952499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:34.261579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:30:34.282711Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673391788015652:2081] 1767807032872057 != 1767807032872060 2026-01-07T17:30:34.290851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:34.321145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:34.532948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:34.532972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:34.532986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:30:34.533097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:35.726581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:37.937894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673391788015887:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:37.937969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:39.318590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421852787631:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.318741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.320880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421852787643:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.320962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673421852787644:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.321013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:39.331565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:39.346315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673421852787647:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:30:39.551712Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673421852787698:2545] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11218, MsgBus: 20833 2026-01-07T17:30:41.112281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:41.148842Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:30:41.406919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:41.406996Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:41.409178Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673423503728007:2081] 1767807040991285 != 1767807040991288 2026-01-07T17:30:41.409227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:41.458114Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:41.460906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:41.721505Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:41.721525Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:41.721531Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:41.721608Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:41.900978Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:42.053399Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:42.547310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:42.555473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:46.236035Z node 
2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673449273532672:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:46.236158Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:46.237484Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673449273532684:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:46.237553Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673449273532685:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:46.237857Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:46.244843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:46.260433Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673449273532688:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:30:46.328781Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673449273532739:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15178, MsgBus: 5816 2026-01-07T17:30:47.593853Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;acto ... d ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:48.950695Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:53.299048Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673736679011859:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.299200Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.300191Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673736679011869:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.300272Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.331111Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.404759Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.525389Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673736679012035:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.525555Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.529452Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673736679012040:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.529555Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592673736679012041:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.530055Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.535666Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:53.560315Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592673736679012044:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:31:53.665606Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592673736679012097:2658] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-07T17:32:02.945391Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:02.957486Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:32:02.961567Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:32:02.961871Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:02.962186Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:32:03.444497Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:03.444732Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:03.498048Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:34:2081] 1767807117145732 != 1767807117145735 2026-01-07T17:32:03.504173Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:03.572111Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:03.674232Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:04.009576Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:04.026712Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:04.149716Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:04.149902Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:875:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:04.150040Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:04.151852Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:878:2755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:04.151997Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:04.159888Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:04.291845Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:879:2756], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:32:04.321004Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:935:2793] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "524d6d75-f949d1ee-d263f4e3-2ccba33c" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'219) \'(\'\"_id\" \'\"d4a29488-ae606dfc-9e68cc45-100b53d3\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11903, MsgBus: 26404 2026-01-07T17:31:47.388578Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673714045901306:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:47.388637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:47.915036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:47.915119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:47.920591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:47.998147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:48.087854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:48.088285Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673714045901212:2081] 1767807107381470 
!= 1767807107381473 2026-01-07T17:31:48.353468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:48.353490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:48.353495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:48.353553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:48.371530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:48.401664Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:48.852016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:48.859241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:48.928194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:49.075785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:49.265260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:49.354684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.653886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673731225772276:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.653961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.654572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673731225772286:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:51.654623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.027641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.102101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.148831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.200322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.287922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.358135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.390481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673714045901306:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:52.390586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:52.443355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.532779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.631014Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673735520740457:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.631106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.631464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673735520740463:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.631502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673735520740464:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.631764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:52.634689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:52.646326Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673735520740467:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:52.704630Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673735520740518:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:54.855568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:57.255429Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:57.323005Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:57.324467Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673755686234402:2081] 1767807117236699 != 1767807117236702 2026-01-07T17:31:57.348994Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:57.350380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:57.350442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:57.380587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:57.500618Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:57.500636Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:57.500640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:57.500715Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:57.551080Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:58.114217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:58.119478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:58.128938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.205101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.338124Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:58.393845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.479048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.133101Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673772866105451:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.133235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.133623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673772866105461:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.133721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.210787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.256233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.325214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.379713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.482924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.553250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.610766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.685437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.800645Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673772866106335:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.800739Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.801188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673772866106340:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.801228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673772866106341:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.801457Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:01.805463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:01.821642Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673772866106344:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:01.877275Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673772866106395:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:02.247303Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673755686234428:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:02.248102Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:04.010628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> ColumnBuildTest::ValidDefaultValue |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 8587, MsgBus: 6048 2026-01-07T17:31:26.016518Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673624046410065:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:26.016616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:26.368884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:26.399377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:26.399505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:26.498017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:26.507053Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673619751442661:2081] 1767807086004373 != 1767807086004376 2026-01-07T17:31:26.517529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:26.651048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:26.829762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:26.829789Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:26.829798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:26.829892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:27.025135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:27.846053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:27.853909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:27.935160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.180194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.536868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.647763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.007287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673645521248319:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.007410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.007962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673645521248329:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.008044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.026920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673624046410065:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:31.027234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:31.434097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.476701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.588310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.678167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.752104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.809075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.871062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.967656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.134095Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673649816216520:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.134189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.134527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673649816216525:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.134560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673649816216526:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.134675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.140942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:32.160219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673649816216529:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:32.254761Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673649816216580:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6160, MsgBus: 61739 2026-01-07T17:31:36.108831Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor; ... ntly supported, code: 2031 2026-01-07T17:31:57.530630Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YTJkYTM2MGQtZDJiNjIwNjItYmYzMTk2MTYtNDU4ZTIzMzQ=, ActorId: [4:7592673754741167043:2318], ActorState: ExecuteState, LegacyTraceId: 01kecr7pthbtp8fcqbk8dqf1nh, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 62 } message: "At function: KiCreateTable!" end_position { row: 6 column: 62 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:31:57.595916Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592673754741167185:2344], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-07T17:31:57.598468Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=Y2Y4ZDA2ZGMtZDAwZGFmYTEtOTA0ZDRmOGUtYzI2YmUyYTU=, ActorId: [4:7592673754741167174:2338], ActorState: ExecuteState, LegacyTraceId: 01kecr7t797dhxt5mzfha7tecq, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 45 } message: "At function: KiCreateTable!" end_position { row: 6 column: 45 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:31:57.685279Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592673754741167204:2350], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-07T17:31:57.688563Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=N2Y1YmNiNS1hY2NmYTllNy0yNTgxNzA3Zi05ZDkzZmQ1Nw==, ActorId: [4:7592673754741167198:2347], ActorState: ExecuteState, LegacyTraceId: 01kecr7t9g98jt4tepcttte9cz, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 43 } message: "At function: KiCreateTable!" end_position { row: 6 column: 43 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:31:57.789984Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592673733266329765:2098];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:57.790070Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32669, MsgBus: 29147 2026-01-07T17:31:58.717004Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673758005328374:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:58.717926Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:58.763058Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:58.815895Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:58.830258Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:58.830358Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:58.862422Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:58.944312Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:58.944344Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:58.944352Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:58.944444Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:59.012901Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:59.502503Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:59.525144Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:31:59.560650Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-07T17:31:59.578920Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:31:59.721857Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:03.717043Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673758005328374:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:03.717120Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:03.744519Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673779480165779:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.744642Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.745073Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673779480165792:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.745159Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673779480165791:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.745532Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.750707Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:03.774897Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673779480165795:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:32:03.834160Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673779480165846:2603] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:03.870324Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:04.182841Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:04.472103Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:32:04.482878Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:32:04.496518Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 26864, MsgBus: 20248 2026-01-07T17:31:26.128498Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673620336847981:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:26.128537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:26.223841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:26.597031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:26.639091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:31:26.639245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:26.641791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:26.761576Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673620336847954:2081] 1767807086120151 != 1767807086120154 2026-01-07T17:31:26.787545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:26.800842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:26.833217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:26.833244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:26.833250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:26.833337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:27.165971Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:27.869053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:27.880974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:28.017092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.268090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.638500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.724116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.152282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673620336847981:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:31.152427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:31.197933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673641811686309:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.198038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.199415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673641811686319:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.199481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.670579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.714248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.760062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.809512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.872956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.918944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.970186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.055349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.190549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646106654498:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.190639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.193125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646106654503:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.193184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646106654504:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.193490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.197550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:32.219119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673646106654507:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:32.325237Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673646106654558:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... 5: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673744238692725:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.723234Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.769757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.999290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.282125Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673748533661383:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.282233Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.282333Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673748533661388:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.282824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673748533661390:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.282884Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.286924Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:55.303727Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673748533661391:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:31:55.373228Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673748533661445:3411] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18879, MsgBus: 26390 2026-01-07T17:31:59.209311Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673764147252481:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:59.209367Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:59.229372Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:59.353985Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:59.354089Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:59.355664Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:59.372631Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592673764147252450:2081] 1767807119208867 != 1767807119208870 2026-01-07T17:31:59.385368Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:59.485613Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:59.485639Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:59.485647Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:59.485742Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:59.512979Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:00.063222Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:00.071696Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:00.219149Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:03.474961Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673781327122521:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.475085Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.475548Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673781327122530:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.475609Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.543227Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.655710Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.785532Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673781327122693:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.785649Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.786218Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673781327122698:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.786279Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673781327122699:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.786626Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:03.791881Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:03.818366Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592673781327122702:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:32:03.916071Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592673781327122753:2641] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:04.211059Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592673764147252481:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:04.211130Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> Sharding::XXUsage >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> Sharding::XXUsage [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> ColumnBuildTest::AlterMainTable_Failed [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag >> KqpExplain::CreateTableAs-Stats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 1371800463213966980 13965273525809993199 11017543697868009996 9942544047440595368 2567153963543802759 17007429129123001757 6319480430085262650 873176060590413888 605342055276055815 6836253616656911863 14431346139898535592 14573972760454279344 14288812691206877813 5850701165129203606 7646708661758676213 12084461618869623541 10065041397697806167 17953392519060070885 13950265799506731325 18424256714045996661 661886966385595645 5372101568561602454 15043706997038833519 17231178666398605290 17672317919176508749 1258306012431747199 16878932711788593483 8332462830627176663 18235181158208398062 18442070481902302235 11714940385220460225 11110365765291626400 9228663325799402668 873817261299352101 6334585515739887738 16979994468856668322 216483207749519936 1892719935389899558 14698199866587721005 18309772763806985428 13584083128638453963 15284552502239221134 8904182174830480458 10633505948838923548 5779744088230692177 648844345209837553 5788075912738639205 16753950536306230808 2699687620821782107 7916478530159873229 10798809820047634490 2365990190344007387 6176429201081580105 2884415221460663040 7131548657995675769 9211083324561729840 15210819728494825708 5547838825267091669 5732009510539697384 2863794935042818913 1856340002875486565 15602899539549377185 11940661245784948895 1382076039484973374 6860308266449527520 14463491066714350217 12273406381968721947 2783916176828646623 14177888623296678872 5937765339507155817 9096215835276779344 12167563757610216653 10589407939550804116 1422300544481595316 14502538659074621351 12339998410597913807 152330645502690435 8734949152782051485 12811630229310916344 15060509339236044886 327173387797980296 2457958640441323690 2371862140625703301 11137050006730794359 12704202893229013798 14570864576954488724 5742810422444866166 10545278558034136089 11578102829715694549 6883832165064435180 |83.7%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> Yq_1::DescribeQuery [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::JsonToProtoMap >> TestMalformedRequest::CompressedDeflateContentLengthLower >> JsonProtoConversion::JsonToProtoMap [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpOverload::OltpOverloaded+Distributed >> KqpEffects::AlterAfterUpsertTransaction-UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> JsonProtoConversion::JsonToProtoSingleValue >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 23258, MsgBus: 30315 2026-01-07T17:31:23.628529Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673609934143396:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:23.628604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:23.976992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:23.977880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:23.978007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:23.992903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:24.063581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:24.065244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673609934143353:2081] 1767807083625677 != 1767807083625680 2026-01-07T17:31:24.128714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:24.128739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:24.128750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:24.128829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:24.259756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:24.585048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:24.593151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:24.649516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:24.672759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:24.813865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:24.980946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:25.107851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.250976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673627114014417:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:27.251087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:27.251757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673627114014427:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:27.251828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:27.704393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.764679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.856309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.912177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.021972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.091326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.174046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.251890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.389219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673631408982596:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:28.389309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:28.389819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673631408982601:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:28.389854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673631408982602:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:28.390102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:28.394334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:28.425597Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673631408982605:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:28.502084Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673631408982656:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:28.629026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673609934143396:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:28.629086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Nam ... :2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2026-01-07T17:32:00.710881Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592673747776491014:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:00.711006Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26909, MsgBus: 32464 2026-01-07T17:32:01.602380Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673773787165572:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.602657Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:01.876988Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:01.883659Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:01.886757Z node 5 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:01.886848Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:01.888812Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673773787165417:2081] 1767807121538987 != 1767807121538990 2026-01-07T17:32:01.934999Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:02.129039Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:02.133673Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.133700Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.133709Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.133810Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.642672Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:02.810905Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.818629Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:06.481080Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673795262002787:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.481219Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.482154Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673795262002800:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.482245Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.482320Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673795262002799:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.487041Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:06.510895Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673795262002803:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:32:06.566640Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673795262002854:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:06.591641Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673773787165572:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:06.591710Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:06.610753Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, 
+∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> Yq_1::CreateQuery_Without_Connection [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2026-01-07T17:31:11.467028Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011190s 2026-01-07T17:31:11.499025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673558659171072:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:11.499076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:11.571236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E0107 17:31:12.063049198 66952 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:12.063175482 66952 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:12.112218Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.190635Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:65521 2026-01-07T17:31:12.233498Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.306058Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.373870Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.503791Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.503921Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.503954Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.513239Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:12.541781Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.544973Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.549853Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.562371Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.562473Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.573927Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.574001Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.590895Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.591068Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.600484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:12.600577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:12.601332Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.617604Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.698019Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.773381Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.807405Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.807690Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.808492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:12.817478Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.818781Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.818900Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:65521: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:65521 } ] 2026-01-07T17:31:12.826753Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session err ... 02\000\037\000\000\000yandexcloud://Execute_folder_id\017\000\000\000idempotency_key\"\023\010\003\022\010response\030\201 (\0000\000(\0000\000@\201 @\201 H\001R\022\010\324\227\220\314\2713\020\377\377\377\377\377\377\377\377\377\001`\000h\337\200\200\200\200\200@p\004z\000z\000\240\001\000\270\001\000\300\001\001\312\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2026-01-07T17:32:06.483099Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-07T17:32:06.483212Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7592673794291698237 RawX2: 4503616807242189 } } DstEndpoint { ActorId { RawX1: 7592673794291698233 RawX2: 4503616807242189 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:06.483305Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:454: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Shards State: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 } 2026-01-07T17:32:06.483399Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:521: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Sending TEvResolveKeySet update for table 'Root/yq/idempotency_keys', range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; (String : yandexcloud://Execute_folder_id, String : idempotency_key)], attempt #1 2026-01-07T17:32:06.483463Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:06.483479Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:06.483519Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:06.483535Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-07T17:32:06.483556Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-07T17:32:06.483573Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:06.483584Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:06.483600Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Received TEvResolveKeySetResult update for table 'Root/yq/idempotency_keys' 2026-01-07T17:32:06.483667Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Processing resolved ShardId# 72075186224037893, partition range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; ()), i: 0, state ranges: 0, points: 1 2026-01-07T17:32:06.483685Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Add point to new shardId: 72075186224037893 2026-01-07T17:32:06.483807Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. 
Pending shards States: TShardState{ TabletId: 72075186224037893, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-07T17:32:06.483821Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:06.483833Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. BEFORE: 1.0 2026-01-07T17:32:06.483873Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/idempotency_keys, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1767807126484), lockTxId = 281474976710751, lockNodeId = 4 2026-01-07T17:32:06.483912Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. AFTER: 0.1 2026-01-07T17:32:06.483924Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-07T17:32:06.484806Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976710751 DataShard: 72075186224037893 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 49, BrokenTxLocks= 2026-01-07T17:32:06.484831Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Taken 1 locks 2026-01-07T17:32:06.484850Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:06.484877Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:32:06.484903Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:06.484943Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:06.484967Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. 
enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:06.485012Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388586 2026-01-07T17:32:06.485043Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. returned 1 rows; processed 1 rows 2026-01-07T17:32:06.485099Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. dropping batch for read #0 2026-01-07T17:32:06.485115Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:06.485131Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:06.485152Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710751, task: 1, CA Id [4:7592673794291698237:2509]. returned async data processed rows 1 left freeSpace 8388586 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:06.485282Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673794291698237:2509], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82ydbvfa8kpf0ypzywsf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWYzZjZiZTMtNmEyZTI1ZGMtZWJhM2NiZDgtNDI3MjU3ZjM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:06.485511Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710751;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthNone >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict >> KqpQuery::UpdateThenDelete+UseSink [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> TestMalformedRequest::ContentLengthLower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9440, MsgBus: 9979 2026-01-07T17:31:53.446208Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673737365153637:2231];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:53.450042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:53.514721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:53.919285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:53.919399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:53.973907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:54.016367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:54.016478Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673737365153444:2081] 1767807113415188 != 1767807113415191 2026-01-07T17:31:54.016746Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:54.135339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:54.135357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:54.135374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:54.135463Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:54.313496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:54.447221Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:54.785078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:54.790934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:54.842845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.035761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.223582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.315967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.128881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754545024509:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.129011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.129393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754545024519:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.129425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.512316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.554568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.589755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.625347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.674631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.733617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.775293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.838888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.933578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754545025397:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.933629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.933847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754545025402:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.933891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754545025403:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.933996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.937495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:57.947305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673754545025406:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:58.033500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673758839992753:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:58.445903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673737365153637:2231];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:58.445978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence; ... t path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:02.211497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:02.211567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:02.213028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:02.221037Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673775961043014:2081] 1767807122027018 != 1767807122027021 2026-01-07T17:32:02.229927Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:02.230198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:02.417408Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.417429Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.417439Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.417507Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.470894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:02.987633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.994969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:03.000170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.041135Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:03.073622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.234383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.331053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.904688Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673788845946764:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.904800Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.905170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673788845946773:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.905237Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.986874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.029800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.066754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.134935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.179629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.230175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.265669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.322647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.407215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673793140914943:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.407313Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.407557Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673793140914948:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.407605Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673793140914949:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.407687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.410891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:06.422548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:32:06.422760Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673793140914952:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:06.503903Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673793140915003:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:07.041042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673775961043249:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:07.041102Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:08.070258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2026-01-07T17:31:09.397792Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673550618091641:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.397859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:09.603344050 65521 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:09.635081251 65521 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:10.038857Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.093484Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.114182Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.325912Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.326022Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.326607Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.326732Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.327019Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.327084Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.345658Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.382112Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.421198Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.421265Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.421316Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.429836Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.430121Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4525 2026-01-07T17:31:10.430182Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.430249Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.430299Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.445622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.501118Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.501168Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.501284Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.504740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.507185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.510223Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.543370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.553286Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.626033Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.637370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.646695Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4525: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4525 } ] 2026-01-07T17:31:10.646833Z node 1 ... /session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:07.359383Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-07T17:32:07.359457Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673799557793233 RawX2: 4503616807242649 } } DstEndpoint { ActorId { RawX1: 7592673799557793234 RawX2: 4503616807242650 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673799557793234 RawX2: 4503616807242650 } } DstEndpoint { ActorId { RawX1: 7592673799557793229 RawX2: 4503616807242199 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:07.359472Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:07.359481Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:07.359498Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:07.359514Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. 
Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:07.359523Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:07.360612Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Received TEvResolveKeySetResult update for table 'Root/yq/queries' 2026-01-07T17:32:07.360701Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Processing resolved ShardId# 72075186224037894, partition range: [(String : yandexcloud://some_folder_id, String : utqudo61aera58kri4b9) ; ()), i: 0, state ranges: 0, points: 1 2026-01-07T17:32:07.360723Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Add point to new shardId: 72075186224037894 2026-01-07T17:32:07.360833Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Pending shards States: TShardState{ TabletId: 72075186224037894, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudo61aera58kri4b9)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudo61aera58kri4b9)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-07T17:32:07.360851Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:07.360864Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. BEFORE: 1.0 2026-01-07T17:32:07.360913Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Send EvRead to shardId: 72075186224037894, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-07T17:32:07.360970Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. AFTER: 0.1 2026-01-07T17:32:07.360983Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-07T17:32:07.362038Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. 
Recv TEvReadResult from ShardID=72075186224037894, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:07.362065Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Taken 0 locks 2026-01-07T17:32:07.362081Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:07.362106Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673799557793233:2969], TxId: 281474976715744, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-01-07T17:32:07.362128Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673799557793233:2969], TxId: 281474976715744, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-07T17:32:07.362147Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:07.362170Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. enter pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:07.362201Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. exit pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 1 freeSpace: 8386352 2026-01-07T17:32:07.362225Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. returned 1 rows; processed 1 rows 2026-01-07T17:32:07.362262Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. dropping batch for read #0 2026-01-07T17:32:07.362276Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:07.362289Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:07.362308Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715744, task: 1, CA Id [4:7592673799557793233:2969]. returned async data processed rows 1 left freeSpace 8386352 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:07.362423Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673799557793233:2969], TxId: 281474976715744, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:07.362589Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715744;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:07.362800Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:07.362905Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673799557793234:2970], TxId: 281474976715744, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr83adazs6nnyr43hy65yh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzQ3YjE0OTctZjAyMWRhNWYtYjEyMDZiZDEtYTE3MjM3MjA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:07.362990Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715744;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 7694, MsgBus: 10373 2026-01-07T17:31:26.152648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673621171969308:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:26.152706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:26.517616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:26.527358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:26.527678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:26.584732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:26.624994Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:26.736737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:26.736759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:26.736766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:26.736850Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:26.799508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:27.165446Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:27.296777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:27.304510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:27.347816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.542426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.781354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.895932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:29.681679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673634056872984:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.681816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.685710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673634056872994:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.685810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.072165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.131760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.179839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.221696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.268522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.312606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.348386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.403066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.529397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673638351841168:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.529485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.529830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673638351841173:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.529883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673638351841174:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.529935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:30.533552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:30.551101Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673638351841177:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:30.620256Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673638351841228:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:31.154847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673621171969308:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:31.154927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:32.575494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ... r] ActorId: [4:7592673768202801639:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.781375Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673768202801650:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.781461Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.781939Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673768202801654:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.782018Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.785943Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:00.803678Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592673768202801653:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:32:00.899655Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592673768202801706:2620] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:00.948550Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.267879Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:01.323359Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592673751022931602:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.323424Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:01.665370Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:32:01.674049Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 26315, MsgBus: 10069 2026-01-07T17:32:03.082895Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592673781450615967:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:03.083048Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:03.204999Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:03.408710Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:03.412725Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592673781450615841:2081] 1767807123058162 != 1767807123058165 2026-01-07T17:32:03.421158Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:32:03.421284Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:03.450328Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:03.456256Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:03.655466Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:03.655494Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:03.655502Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:03.655602Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:04.058688Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:04.108136Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:04.294854Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:04.309056Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:08.082025Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673781450615967:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:08.082111Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:08.167016Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673802925453200:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:08.167059Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673802925453214:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:08.167138Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:08.172975Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673802925453219:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:08.173088Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:08.173715Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:08.186048Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673802925453218:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:32:08.247663Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673802925453272:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:08.283127Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:08.518989Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592673802925453413:2351], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2026-01-07T17:32:08.521375Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YTUzZGI2ZTctY2QxNjgwNjQtNzE1OGJmOTAtYmE1NzM3MmI=, ActorId: [5:7592673802925453411:2350], ActorState: ExecuteState, LegacyTraceId: 01kecr84xd08tzc6zqjx738nx0, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 5 column: 49 } message: "Creating table with data is not supported." end_position { row: 5 column: 49 } severity: 1 } } tx_id# trace_id# |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 15024, MsgBus: 4899 2026-01-07T17:31:28.497337Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673630662636577:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:28.501850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:29.165886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:29.166002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:29.252546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:29.309658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:29.376725Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:29.529141Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:29.683034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:29.683058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:29.683064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:29.683135Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:29.688940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:30.556758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:30.585089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:30.673381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.903790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.109016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.214120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.477104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673630662636577:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:33.477195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:33.643462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673652137474897:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.643557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.643933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673652137474906:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.643970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.037665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.080292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.129436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.171712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.213774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.264905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.323942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.427614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.580688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673656432443078:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.580755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.581203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673656432443083:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.581243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673656432443084:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.581271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.589633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:34.603805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-07T17:31:34.609133Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673656432443087:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:34.675440Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673656432443140:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6738, MsgBus: 18510 2026-01-07T17:31:39.038078Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliv ... } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.781243Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.782180Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.782209Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.783115Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.783194Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.784138Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { 
SchemeNeedActualization: false } ; 2026-01-07T17:32:08.784136Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.785030Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037986;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.785658Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.785872Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037988;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.786768Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037990;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.787044Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.787567Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.787957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.788491Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.788888Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.789373Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.789766Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.790227Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.790703Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.791611Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 
Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.792122Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.792879Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:32:08.793128Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2026-01-07T17:31:09.356491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673548838693081:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.360163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:09.850905594 65445 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:09.851020119 65445 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:09.854780Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:09.921103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:09.947836Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:09.972879Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:6492 2026-01-07T17:31:10.081080Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.114551Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.115487Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.252301Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.252392Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.252428Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.252469Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.253039Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.253903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.262549Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.281215Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.281286Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.303100Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.389470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.390249Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.391134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.420200Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.441778Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.442158Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.473131Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.537398Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.555536Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.569065Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.581440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.607690Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.607764Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.617060Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6492 } ] 2026-01-07T17:31:10.642768Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:6492: Failed to connect to ... x: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:06.446261Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-07T17:32:06.446276Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-07T17:32:06.446293Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-07T17:32:06.446301Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715729;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:06.446314Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-01-07T17:32:06.446322Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:06.446529Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592673792858937986:2884], TxId: 281474976715729, task: 1. Ctx: { TraceId : 01kecr82hqa3s08fezt23n1zmk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2IwZTkwYTUtMzUwZDI4MTAtZWFjN2E5ZmYtOGE1NDZlM2M=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:06.446558Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592673792858937986:2884], TxId: 281474976715729, task: 1. Ctx: { TraceId : 01kecr82hqa3s08fezt23n1zmk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2IwZTkwYTUtMzUwZDI4MTAtZWFjN2E5ZmYtOGE1NDZlM2M=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:06.446700Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715729;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:06.450053Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Received TEvResolveKeySetResult update for table 'Root/yq/pending_small' 2026-01-07T17:32:06.450173Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Processing resolved ShardId# 72075186224037893, partition range: [(String : TestTenant, String : NULL, String : NULL) ; ()), i: 0, state ranges: 1, points: 0 2026-01-07T17:32:06.450277Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:655: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Compare range #0 [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] with partition range [(String : TestTenant, String : NULL, String : NULL) ; ()) : 0 2026-01-07T17:32:06.450329Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:662: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Add range to new shardId: 72075186224037893, range: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] 2026-01-07T17:32:06.450478Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Pending shards States: TShardState{ TabletId: 72075186224037893, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-07T17:32:06.450494Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:06.450510Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. BEFORE: 1.0 2026-01-07T17:32:06.450623Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-07T17:32:06.450674Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. AFTER: 0.1 2026-01-07T17:32:06.450693Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-07T17:32:06.452341Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:06.452367Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Taken 0 locks 2026-01-07T17:32:06.452379Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. 
new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:06.452402Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-01-07T17:32:06.452422Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-07T17:32:06.452444Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:06.452461Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:06.456011Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:06.456041Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. returned 0 rows; processed 0 rows 2026-01-07T17:32:06.456098Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. dropping batch for read #0 2026-01-07T17:32:06.456111Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:06.456124Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:06.456141Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715730, task: 1, CA Id [1:7592673792858938001:2888]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:06.456239Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592673792858938001:2888], TxId: 281474976715730, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr82kj23fd6hb0abg31713. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:06.456469Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715730;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:06.456691Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592673792858938002:2889], TxId: 281474976715730, task: 2. Ctx: { TraceId : 01kecr82kj23fd6hb0abg31713. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:06.456750Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592673792858938002:2889], TxId: 281474976715730, task: 2. Ctx: { TraceId : 01kecr82kj23fd6hb0abg31713. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM5MDJkNTctOGYwYzM0MGUtNjkxYzEyNmItNWI5YjkzMzI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:06.456832Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715730;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; [good] Yq_1::CreateQuery_Without_Connection |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] >> TestMalformedRequest::ContentLengthCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32319, MsgBus: 21099 2026-01-07T17:31:29.145689Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673636555358271:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:29.145959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:29.560982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:29.567139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:29.567275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:29.640715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:29.665246Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673636555358247:2081] 1767807089128425 != 1767807089128428 2026-01-07T17:31:29.665413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:29.784567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:29.957687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:29.957714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:29.957722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:29.957812Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:30.170427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:30.582883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:30.589035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:30.700461Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.880238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.110705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.233454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.149326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673653735229304:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.149518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.153504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673653735229314:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.153602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.554974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.603095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.657327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.699254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.767603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.832057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.917987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.007088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.149344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673636555358271:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:34.149394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:34.224828Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673658030197485:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.224958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.225333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673658030197490:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.225421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673658030197491:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.225465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.231094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:34.244566Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673658030197494:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:34.331671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673658030197545:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:37.429748Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592673670915099781:2547], TxId: 281474976710673, task: 1. Ctx: { Checkpo ... "Column \'NonExistentColumn\' does not exist in table \'/Root/KeyValue\'." end_position { row: 3 column: 84 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 14610, MsgBus: 27270 2026-01-07T17:32:01.777702Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592673773710626030:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.778179Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:01.946014Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:01.946558Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:01.948773Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592673773710626002:2081] 1767807121772730 != 1767807121772733 2026-01-07T17:32:01.959439Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:01.959551Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:01.979150Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:02.166693Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.166726Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.166735Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.166843Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.223488Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:02.818458Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:02.827754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.834766Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:02.858322Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:02.974154Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.156294Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.243434Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.535687Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673795185464344:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.535760Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.535973Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673795185464354:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.536000Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.622401Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.670714Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.726841Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.778835Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592673773710626030:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:06.778902Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:06.788619Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.856604Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.901551Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.948909Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.045642Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.159275Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673799480432523:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:07.159397Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:07.159725Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673799480432529:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:07.159730Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592673799480432528:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:07.159801Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:07.164178Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:07.175721Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592673799480432532:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:07.252720Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592673799480432583:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TestMalformedRequest::CompressedGzipContentLengthLower >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthCorrect >> TestMalformedRequest::ContentLengthNone >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:04.597053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:04.597146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:04.597176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:04.597209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:04.597236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:04.597257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:04.597301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:04.597375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:04.598019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:04.598225Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:04.676636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:04.676685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:04.699056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:04.699456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:04.699695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:04.748303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:04.748690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:04.749406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:04.749668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:04.752804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:04.753028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:04.754236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:04.754294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:04.754404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:04.754443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:04.754479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:04.754642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:04.900011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.900757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.900891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.900992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:04.901529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
57 ready parts: 1/1 2026-01-07T17:32:11.877807Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-07T17:32:11.877844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:11.877890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-07T17:32:11.877962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1703:3569] message: TxId: 281474976730757 2026-01-07T17:32:11.878020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:11.878060Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-07T17:32:11.878094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-07T17:32:11.878162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:11.881186Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-07T17:32:11.881265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-07T17:32:11.881341Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-07T17:32:11.881481Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-07T17:32:11.883475Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-07T17:32:11.883618Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:11.883673Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:32:11.885180Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-07T17:32:11.885296Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:11.885335Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:11.885449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:11.885490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1761:3615] TestWaitNotification: OK eventTxId 105 
2026-01-07T17:32:11.886184Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:11.886486Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:11.887395Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:11.887627Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 250us result status StatusSuccess 2026-01-07T17:32:11.888098Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> listing_paging.py::TestListingPaging::test_listing_paging_solomon [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> BindQueue::Basic >> Yq_1::DeleteQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] Test command err: 2026-01-07T17:32:08.415872Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:08.417368Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:08.417452Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:08.418244Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:08.418542Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e1c/r3tmp/tmpyDgq3u/pdisk_1.dat 
2026-01-07T17:32:09.570610Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:09.570732Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:09.572339Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:09.573866Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:09.574966Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:09.575040Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e1c/r3tmp/tmpBGJuVP/pdisk_1.dat driveSize# 7900 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# -0.0125 driveSize# 8000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0 driveSize# 8100 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0.0125 driveSize# 16000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 1 relativeError# 0 driveSize# 24000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 12 SlotSizeInUnits# 2 relativeError# 0 driveSize# 31000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 2 relativeError# -0.03125 driveSize# 50000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 13 SlotSizeInUnits# 4 relativeError# -0.03846153846 driveSize# 50000 unitSizeInBytes# 100 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 32 relativeError# -0.0234375 driveSize# 18000 unitSizeInBytes# 200 maxSlots# 16 -> ExpectedSlotCount# 11 SlotSizeInUnits# 8 relativeError# 0.02272727273 driveSize# 1 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 1 relativeError# 0 driveSize# 2 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 2 relativeError# 0 driveSize# 3 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# -0.25 driveSize# 4 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0 driveSize# 5 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0.25 driveSize# 6 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.25 driveSize# 7 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 
relativeError# -0.125 driveSize# 8 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0 driveSize# 9 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.125 driveSize# 10 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.25 driveSize# 11 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.375 driveSize# 12 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.25 driveSize# 13 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.1875 driveSize# 14 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.125 driveSize# 15 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.0625 driveSize# 16 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0 driveSize# 17 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.0625 driveSize# 18 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.125 driveSize# 19 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.1875 driveSize# 20 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.25 driveSize# 21 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.3125 driveSize# 22 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.375 driveSize# 23 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.4375 driveSize# 24 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.25 driveSize# 25 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.21875 driveSize# 26 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.1875 driveSize# 27 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.15625 driveSize# 28 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.125 driveSize# 29 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.09375 driveSize# 30 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.0625 driveSize# 31 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.03125 driveSize# 32 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0 driveSize# 33 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.03125 driveSize# 34 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.0625 driveSize# 35 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.09375 driveSize# 36 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.125 driveSize# 37 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.15625 driveSize# 38 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.1875 driveSize# 39 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 
0.21875 driveSize# 40 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.25 driveSize# 41 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.28125 driveSize# 42 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.3125 driveSize# 43 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.34375 driveSize# 44 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.375 driveSize# 45 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.40625 driveSize# 46 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.4375 driveSize# 47 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.46875 driveSize# 48 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.25 driveSize# 49 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.234375 driveSize# 50 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.21875 driveSize# 51 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.203125 driveSize# 52 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.1875 driveSize# 53 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.171875 driveSize# 54 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.15625 driveSize# 55 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.140625 driveSize# 56 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.125 driveSize# 57 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.109375 driveSize# 58 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.09375 driveSize# 59 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.078125 driveSize# 60 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.0625 driveSize# 61 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.046875 driveSize# 62 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.03125 driveSize# 63 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.015625 driveSize# 64 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0 driveSize# 65 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.015625 driveSize# 66 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.03125 driveSize# 67 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.046875 driveSize# 68 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.0625 driveSize# 69 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnit ... 
writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1001 2026-01-07T17:32:12.878680Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1001 2026-01-07T17:32:12.878726Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1001 2026-01-07T17:32:12.878767Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1001 2026-01-07T17:32:12.878802Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1001 2026-01-07T17:32:12.878881Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1001 2026-01-07T17:32:12.878928Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1001 2026-01-07T17:32:12.878982Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1001 2026-01-07T17:32:12.879017Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1001 2026-01-07T17:32:12.879056Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1001 2026-01-07T17:32:12.879098Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1001 2026-01-07T17:32:12.879146Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1001 2026-01-07T17:32:12.879199Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1001 2026-01-07T17:32:12.879241Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1001 2026-01-07T17:32:12.879281Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1001 2026-01-07T17:32:12.879319Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1001 2026-01-07T17:32:12.879372Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1001 2026-01-07T17:32:12.879449Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1001 2026-01-07T17:32:12.879507Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1001 2026-01-07T17:32:12.879559Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1001 2026-01-07T17:32:12.879597Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1001 2026-01-07T17:32:12.879641Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1001 2026-01-07T17:32:12.879690Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1001 2026-01-07T17:32:12.879749Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing 
ChunkIdx# 0 SectorIdx# 315 PDiskId# 1001 2026-01-07T17:32:12.879852Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1001 2026-01-07T17:32:12.880016Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D61343F7480 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1001 2026-01-07T17:32:12.880136Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000004 PDiskId# 1001 2026-01-07T17:32:12.880272Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2562560001348 PDiskId# 1001 2026-01-07T17:32:12.880350Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1001 2026-01-07T17:32:12.881156Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-07T17:32:12.881276Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1534.455681 2026-01-07T17:32:12.881296Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1001 2026-01-07T17:32:12.881570Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-07T17:32:12.881622Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-07T17:32:12.881676Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.881721Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-07T17:32:12.881814Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000005 PDiskId# 1001 2026-01-07T17:32:12.881945Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2562560001604 PDiskId# 1001 2026-01-07T17:32:12.882049Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.885063Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1534.457372 2026-01-07T17:32:12.893410Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000006 PDiskId# 1001 2026-01-07T17:32:12.893576Z node 3 :BS_PDISK DEBUG: 
blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001848 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1534.469043 2026-01-07T17:32:12.893845Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-07T17:32:12.893895Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-07T17:32:12.893956Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-07T17:32:12.893986Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001848 PushRequestToScheduler Push to FastOperationsQueue.size# 2 2026-01-07T17:32:12.894019Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.894066Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-07T17:32:12.894171Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 13 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } 2026-01-07T17:32:12.905044Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.916325Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.926718Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.937015Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.947659Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.960770Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 
PDiskId# 1001 2026-01-07T17:32:12.971072Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.981362Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:12.995021Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:13.005464Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:13.016035Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:13.028498Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:13.040919Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-07T17:32:13.051707Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::Unlocking_Failed >> TDistconfGenerateConfigTest::UsedNodes [GOOD] >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] |83.7%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestSendToInvalidGroupId ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2026-01-07T17:31:10.649772Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673553273432605:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.649810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:11.585268615 66391 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:11.585364471 66391 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:11.586257Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.649135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:11.669855Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.669948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.670027Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.670126Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.670181Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.677263Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.677352Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.783547Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.783600Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.783643Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.783690Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.783724Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.920532Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.978685Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:11.983266Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10166 2026-01-07T17:31:12.030142Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.055623Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.074532Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.109146Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.109317Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.109501Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.109535Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.118434Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.118521Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.144101Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.149073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:12.149158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:12.216049Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10166 } ] 2026-01-07T17:31:12.265741Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10166: Failed to connect to remote host: Connection refused } {
: Error: Grpc error respons ... 474976710761, task: 1, CA Id [4:7592673810751189563:3025]. dropping batch for read #0 2026-01-07T17:32:10.956291Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710761, task: 1, CA Id [4:7592673810751189563:3025]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:10.956303Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710761, task: 1, CA Id [4:7592673810751189563:3025]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:10.956321Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710761, task: 1, CA Id [4:7592673810751189563:3025]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:10.956451Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673810751189563:3025], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:10.956618Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710761;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:10.956803Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:10.956904Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-01-07T17:32:10.957034Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: SelfId: [4:7592673810751189566:3026], TxId: 281474976710761, task: 2. Add data: 234 / 234 2026-01-07T17:32:10.957103Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: SelfId: [4:7592673810751189566:3026], TxId: 281474976710761, task: 2. Send data=234, closed=1, bufferActorId=[4:7592673810751189558:2369] 2026-01-07T17:32:10.957127Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 234 2026-01-07T17:32:10.957142Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-07T17:32:10.957263Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:50:1]). lockId=281474976710754. 
ActorId=[4:7592673810751189571:2369] 2026-01-07T17:32:10.957308Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Open: token=0 2026-01-07T17:32:10.957455Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Write: token=0 2026-01-07T17:32:10.957537Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Close: token=0 2026-01-07T17:32:10.957583Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [4:7592673810751189566:3026], TxId: 281474976710761, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7592673810751189558:2369] 2026-01-07T17:32:10.957600Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [4:7592673810751189566:3026], TxId: 281474976710761, task: 2. Finished 2026-01-07T17:32:10.957623Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:10.957649Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673810751189564:3026], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr86xs7k8d0axmvvg97xhg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzQxZWQ2MjEtZjI4OTViZjktYjk4OGQ3YTAtYTRiYzA0ZmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:10.957746Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710761;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:10.958028Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3486: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Start prepare for distributed commit 2026-01-07T17:32:10.958050Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1067: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]SetPrepare; txId=281474976710761 2026-01-07T17:32:10.958065Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Flush data 2026-01-07T17:32:10.958209Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Send EvWrite to ShardID=72075186224037895, isPrepare=1, isImmediateCommit=0, TxId=281474976710761, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710754 DataShard: 72075186224037895 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 50, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=324 2026-01-07T17:32:10.958300Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Send EvWrite (external) to ShardID=72075186224037891, isPrepare=1, isRollback=0, TxId=281474976710761, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710754 DataShard: 72075186224037891 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 46, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:10.959106Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4150: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Recv EvWriteResult (external) from ShardID=72075186224037891, Status=STATUS_PREPARED, TxId=281474976710761, Locks= , Cookie=0 2026-01-07T17:32:10.959147Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4468: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Got prepared result TxId=281474976710761, TabletId=72075186224037891, Cookie=0 2026-01-07T17:32:10.959179Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Flush data 2026-01-07T17:32:10.959218Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_PREPARED, TxId=281474976710761, Locks= , Cookie=1 2026-01-07T17:32:10.959271Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3532: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Start distributed commit with TxId=281474976710761 2026-01-07T17:32:10.959294Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1075: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]SetDistributedCommit; txId=281474976710761 2026-01-07T17:32:10.959331Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3719: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2026-01-07T17:32:10.959458Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:3796: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Got transaction status, status: 16 2026-01-07T17:32:10.959757Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3796: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Got transaction status, status: 17 2026-01-07T17:32:10.962687Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4182: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Recv EvWriteResult (external) from ShardID=72075186224037891, Status=STATUS_COMPLETED, TxId=281474976710761, Locks= , Cookie=0 2026-01-07T17:32:10.962722Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4497: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Got completed result TxId=281474976710761, TabletId=72075186224037891, Cookie=0, Locks= 2026-01-07T17:32:10.963862Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_COMPLETED, TxId=281474976710761, Locks= , Cookie=0 2026-01-07T17:32:10.963889Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7592673810751189571:2369], Table: `Root/yq/connections` ([72057594046644480:50:1]), SessionActorId: [4:7592673729146806574:2369]Got completed result TxId=281474976710761, TabletId=72075186224037895, Cookie=0, Mode=2, Locks= 2026-01-07T17:32:10.963922Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [4:7592673810751189558:2369], SessionActorId: [4:7592673729146806574:2369], Committed TxId=281474976710761 E0107 17:32:11.015106821 76632 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:32:11.015279436 76632 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] >> TBlobStorageWardenTest::TestFilterBadSerials >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13600, MsgBus: 13112 2026-01-07T17:31:48.622058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673717712262870:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:48.622196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:49.106842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:49.106929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:49.110789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:49.123334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:49.127676Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:49.176583Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673717712262717:2081] 1767807108616030 != 1767807108616033 2026-01-07T17:31:49.410180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:49.430152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:49.430170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:49.430184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:49.430246Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:49.625296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:50.080993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:50.159990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:50.512152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:50.844334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:50.989508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.190573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673739187101072:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.190669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.194248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673739187101082:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.194339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.603520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.623005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673717712262870:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:53.623050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:53.644339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.690438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.782823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.827522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.872950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.938837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.008539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.138587Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673743482069255:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.138673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.139247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673743482069258:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.139305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.139453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673743482069262:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.143112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:54.154940Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673743482069264:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:54.241568Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673743482069317:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:56.189620Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:31:56.201624Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATU ... 17:32:05.569629Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:05.569727Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:06.118239Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:06.133335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:06.146686Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.233959Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.333746Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:06.438021Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.524823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.118946Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564061838:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.119057Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.120011Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564061848:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.120090Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.120351Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564061850:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.120408Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.200389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.242031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.278245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.316340Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.349525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.381335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.413701Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.505169Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.586717Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564062719:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.586788Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.586906Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564062724:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.586954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673806564062725:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.586994Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.590809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:09.606640Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673806564062728:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:09.690781Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673806564062779:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:10.297210Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673789384191016:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:10.297281Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:12.064248Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592673819448964987:2537], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr88278w3ny31z4h14z46s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzFkODcyNjYtN2JmM2VmMzItZjUxNzc1ZWQtNzRiZjAxMDI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:32:12.065103Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592673819448964988:2538], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr88278w3ny31z4h14z46s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzFkODcyNjYtN2JmM2VmMzItZjUxNzc1ZWQtNzRiZjAxMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7592673819448964984:2524], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:32:12.065522Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzFkODcyNjYtN2JmM2VmMzItZjUxNzc1ZWQtNzRiZjAxMDI=, ActorId: [3:7592673815153997659:2524], ActorState: ExecuteState, LegacyTraceId: 01kecr88278w3ny31z4h14z46s, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 10 } Ring { Node: 11 } Ring { Node: 12 } Ring { Node: 13 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 12 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases [GOOD] >> TDistconfGenerateConfigTest::IgnoreNodes [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 22477, MsgBus: 15095 2026-01-07T17:31:49.903145Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673722990601150:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:49.903291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:50.195781Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:50.253687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:50.253779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:50.303574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:50.307536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:50.548417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:50.560347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:50.560365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:50.560371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:50.560431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:50.913906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:51.126400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:51.150193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:51.221183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.349306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.504540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:51.595864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.786978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673740170472161:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.787100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.787563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673740170472171:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:53.787608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.269481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.345634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.421195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.487278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.520438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.577342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.634458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.691718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:54.783611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673744465440341:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.783676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.783983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673744465440346:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.784031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673744465440347:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.784087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:54.791449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:54.806224Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673744465440350:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:54.885899Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673744465440401:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:54.905065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673722990601150:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:54.905156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:56.467264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... d: 72057594046644480 2026-01-07T17:32:07.183037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.271172Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:07.278827Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.481894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.564897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.828076Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673808243661483:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.828153Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.828429Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673808243661492:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.828462Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:09.898858Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.925607Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.957597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.993133Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.027788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.083050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.138776Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.234711Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.348824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673812538629662:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.348941Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.349580Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673812538629667:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.349589Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673812538629668:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.349703Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.353570Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:10.365780Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673812538629671:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:10.433479Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673812538629722:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:11.272170Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673795358757949:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:11.272264Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:12.341618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.049220Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715677; 2026-01-07T17:32:13.060267Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7592673825423532190:2561], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7592673821128564832:2561]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7592673825423532190:2561].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:32:13.060760Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592673825423532184:2561], SessionActorId: [3:7592673821128564832:2561], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7592673821128564832:2561]. 2026-01-07T17:32:13.061051Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Mjc2MDhjNjMtZjY4NDMwZjQtYzA1MWFiZTgtZDk4YTMyZjc=, ActorId: [3:7592673821128564832:2561], ActorState: ExecuteState, LegacyTraceId: 01kecr89bm812wsz0y80rqp5bq, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592673825423532185:2561] from: [3:7592673825423532184:2561] trace_id# 2026-01-07T17:32:13.061185Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592673825423532185:2561] TxId: 281474976715677. Ctx: { TraceId: 01kecr89bm812wsz0y80rqp5bq, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Mjc2MDhjNjMtZjY4NDMwZjQtYzA1MWFiZTgtZDk4YTMyZjc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:32:13.061507Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Mjc2MDhjNjMtZjY4NDMwZjQtYzA1MWFiZTgtZDk4YTMyZjc=, ActorId: [3:7592673821128564832:2561], ActorState: ExecuteState, LegacyTraceId: 01kecr89bm812wsz0y80rqp5bq, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] Test command err: Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } Ring { Node: 2 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 6 } Ring { Node: 11 } Ring { Node: 16 } Ring { Node: 21 } Ring { Node: 26 } Ring { Node: 31 } Ring { Node: 36 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 11 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2026-01-07T17:31:09.638035Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673550499658705:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.638118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:10.013413025 65711 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:10.013549128 65711 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:10.116609Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.303394Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.319521Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.359760Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.360035Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26122 2026-01-07T17:31:10.360114Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.393198Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.432853Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.432962Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.433012Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.435725Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.445889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.499329Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.499468Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.538240Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.569268Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.569348Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.588119Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.625328Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.771129Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.777653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.777774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:10.800536Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.829060Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:10.837118Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.013056Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.013201Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.020742Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.045949Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.046050Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.056153Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26122: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26122 } ] 2026-01-07T17:31:11.071261Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory " ... { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673815192154388 RawX2: 4503616807242637 } } DstEndpoint { ActorId { RawX1: 7592673815192154383 RawX2: 4503616807242576 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:11.923861Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7592673815192154387:2956] 2026-01-07T17:32:11.923901Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:11.923912Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:11.923936Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-07T17:32:11.924054Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673815192154387 RawX2: 4503616807242636 } } DstEndpoint { ActorId { RawX1: 7592673815192154388 RawX2: 4503616807242637 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7592673815192154388 RawX2: 4503616807242637 } } DstEndpoint { ActorId { RawX1: 7592673815192154383 RawX2: 4503616807242576 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-07T17:32:11.924079Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:11.924089Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:11.924110Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:11.924127Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:32:11.924139Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:32:11.924193Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. 
Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:11.924208Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. Taken 0 locks 2026-01-07T17:32:11.924219Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:11.924240Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673815192154387:2956], TxId: 281474976715750, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:32:11.924259Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673815192154387:2956], TxId: 281474976715750, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:11.924273Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:11.924291Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:11.924305Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:11.924316Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. returned 0 rows; processed 0 rows 2026-01-07T17:32:11.924359Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. dropping batch for read #0 2026-01-07T17:32:11.924370Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:11.924383Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:11.924400Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715750, task: 1, CA Id [4:7592673815192154387:2956]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:11.924449Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673815192154387:2956], TxId: 281474976715750, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:11.924656Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715750;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:11.924843Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:11.924897Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7592673815192154388:2957], TxId: 281474976715750, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr87tt3dzzdn8b4dwh7b79. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZmIzMWJhMjgtNjM2YjY0ZDItZTgyYzkwYjEtMTZiZDdiMzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:11.925023Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715750;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:11.929971Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudo61aaevbeira95j" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2026-01-07T17:32:12.085209Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:10954: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:10954 >> KeyValueGRPCService::SimpleWriteReadOverrunV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV2 >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthLower [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases [GOOD] >> TDistconfGenerateConfigTest::BadRack [GOOD] >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCCases >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> TDistconfGenerateConfigTest::GenerateConfig3DCCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21300, MsgBus: 8512 2026-01-07T17:31:28.497656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673631749237570:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:28.528801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:28.550030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:29.087361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:29.087508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:29.162269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:29.175304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:29.206804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:29.209628Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673631749237534:2081] 1767807088485491 != 1767807088485494 2026-01-07T17:31:29.377157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:29.377182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:29.377188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:29.377289Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:29.431174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:29.540694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:29.925322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:29.936668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:29.992440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.145243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.349996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.458025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.867103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673648929108592:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.867218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.867896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673648929108602:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.867959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.408770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.460438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.498288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.501592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673631749237570:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:33.502647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:33.543592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.589944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.628847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.684147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.763166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.874473Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673653224076770:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.874551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.875114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673653224076775:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.875161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673653224076776:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.875263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:33.879098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:33.903375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673653224076779:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:33.986699Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673653224076830:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... eck script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:06.325530Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:06.325558Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:06.325565Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:06.325649Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:06.963290Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:06.969831Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:06.990554Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.031735Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:07.098835Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.298585Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.393951Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.126524Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673810848426727:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.126663Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.127928Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673810848426737:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.127998Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.268847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.311529Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.354414Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.436670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.481886Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.536528Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.583920Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.647262Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.758276Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673810848427616:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.758393Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.758892Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673810848427621:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.758941Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673810848427622:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.758983Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.764212Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:10.780092Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673810848427625:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:10.859060Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673810848427676:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:10.986009Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673789373588490:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:10.986108Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"Tables":["EightShard"],"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/EightShard","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"EightShard","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Columns":["Key"],"Name":"TableLookup","E-Cost":"0","E-Size":"0","LookupKeyColumns":["Key"],"Table":"EightShard"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> KqpReattach::ReattachDeliveryProblem [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthHigher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { 
Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 61437, MsgBus: 11328 2026-01-07T17:31:50.392547Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673727316332252:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:50.392580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:51.043037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:51.090583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:51.102592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:51.262388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:51.289062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673727316332229:2081] 1767807110384643 != 1767807110384646 2026-01-07T17:31:51.315915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:51.340262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:51.440464Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:51.665490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2026-01-07T17:31:51.665511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:51.665517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:51.665590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:52.499903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:52.512024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:52.589326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:52.844210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.086026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:53.220947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.130306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673748791170592:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.130442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.130793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673748791170602:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.130864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:55.393125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673727316332252:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:55.393211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:55.548887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.622349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.655796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.684838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.717693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.776072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.839514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.924900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.015820Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673753086138774:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:56.015952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:56.016325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673753086138779:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:56.016383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673753086138780:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:56.016418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:56.020137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:56.038087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673753086138783:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:56.104678Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673753086138836:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9141, MsgBus: 5005 2026-01-07T17:31:59.593545Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... ATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24044, MsgBus: 29982 2026-01-07T17:32:07.184060Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592673799023337009:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:07.184533Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:07.209215Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:07.283486Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:07.299393Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673799023336969:2081] 1767807127182848 != 1767807127182851 2026-01-07T17:32:07.327930Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:07.328016Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:07.334044Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:07.417653Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:07.417682Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:07.417689Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:07.417766Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:07.439907Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:07.897019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:07.905004Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:07.923279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:07.981795Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:08.149484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:08.194504Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:08.229750Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.021876Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673816203208024:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.021977Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.022248Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673816203208034:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.022280Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.095378Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.138844Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.170967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.213424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.253482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.337057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.378128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.429759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.538855Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673816203208914:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.538934Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.539222Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673816203208920:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.539239Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673816203208919:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.539283Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.543315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:11.555659Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673816203208923:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:11.639289Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673816203208974:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:12.184476Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673799023337009:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:12.184542Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace >> ColumnBuildTest::Locking_Failed >> TestMalformedRequest::CompressedDeflateContentLengthNone [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] Test command err: Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 3 } Ring { Node: 5 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 19 } Ring { Node: 20 } Ring { Node: 21 } Ring { Node: 37 } Ring { Node: 38 } Ring { Node: 39 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 10 } Ring { Node: 19 } } } Expected: NToSelect: 3 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> 
TMLPWriterTests::WriteTwoMessage_Deduplicated [GOOD] >> TMLPWriterTests::Deduplicated_Reboot >> ColumnBuildTest::Rejecting >> ColumnBuildTest::Unlocking_Failed [GOOD] >> ColumnBuildTest::DoNotRestoreDeletedRows >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> TestMalformedRequest::ContentLengthLower [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthCorrect >> ColumnBuildTest::Initiating_Failed >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] >> ColumnBuildTest::Cancelling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpReattach::ReattachDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 32166, MsgBus: 26319 ... waiting for SysViewsRoster update finished 2026-01-07T17:31:54.450597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:54.604304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:31:54.627659Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:31:54.628213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:54.628278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:31:54.968005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:54.968158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:55.119052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807111170256 != 1767807111170260 2026-01-07T17:31:55.130499Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:55.186309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:55.330915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:31:55.718307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:55.718375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:55.718409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:55.718922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:55.733856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:56.116743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:56.196710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.503698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:56.987748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.297029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.208412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.208596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.209811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1979:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.209925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.249279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.442779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.713461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.044279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.331802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.615407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.898057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:00.214154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:00.613229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.613343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.614104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.614227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.614323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.619641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:00.784915Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2802:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:00.843835Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=5; Trying to start YDB, gRPC: 23412, MsgBus: 18658 ... waiting for SysViewsRoster update finished 2026-0 ... State: Connecting -> Connected 2026-01-07T17:32:08.473528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:08.871612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:08.871683Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:08.871726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:08.871991Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:08.883293Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:09.313748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:09.401788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:09.648758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.026716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.379860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.948666Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1908:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.949131Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.950033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1981:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:10.950170Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:11.012304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.224658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.531139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.791494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.051866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.347957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.636569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.005948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.395688Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2794:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.395847Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.396484Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2798:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.396644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2801:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.396744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.404643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:13.563997Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2803:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:13.625682Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2863:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:15.541258Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3119:4407], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3097:4407]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-07T17:32:15.552830Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3119:4407], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3097:4407]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-07T17:32:15.553655Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3113:4407], SessionActorId: [2:3097:4407], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:3097:4407]. 2026-01-07T17:32:15.554017Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=NTJhMzIwMjUtY2U0ZmFhMjYtZmViNTdlYmUtZTVhOWIzNzg=, ActorId: [2:3097:4407], ActorState: ExecuteState, LegacyTraceId: 01kecr8bph22782tsr3t1gr54s, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:3114:4407] from: [2:3113:4407] trace_id# 2026-01-07T17:32:15.554238Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:3114:4407] TxId: 281474976715673. Ctx: { TraceId: 01kecr8bph22782tsr3t1gr54s, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTJhMzIwMjUtY2U0ZmFhMjYtZmViNTdlYmUtZTVhOWIzNzg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-07T17:32:15.554565Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NTJhMzIwMjUtY2U0ZmFhMjYtZmViNTdlYmUtZTVhOWIzNzg=, ActorId: [2:3097:4407], ActorState: ExecuteState, LegacyTraceId: 01kecr8bph22782tsr3t1gr54s, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> ColumnBuildTest::Filling_Failed >> TestMalformedRequest::ContentLengthHigher >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> ColumnBuildTest::Applying_Failed >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:07.439056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:07.439161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:07.439217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:07.439269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:07.439304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:07.439331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:07.439394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:07.439453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:07.440322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:07.440639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:07.532622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:32:07.532683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:07.548418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:07.548836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:07.549072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:07.559407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:07.559731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:07.560344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:07.560599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:07.563355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:07.563535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:07.564512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:07.564573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:07.564701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:07.564734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:07.564764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:07.564900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:07.716942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.720668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.720893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:07.721938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7.038713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976725761, at schemeshard: 72075186233409549 2026-01-07T17:32:17.038797Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-07T17:32:17.038845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-07T17:32:17.038899Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2026-01-07T17:32:17.039013Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1550:3420], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-07T17:32:17.040645Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking 2026-01-07T17:32:17.040771Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1550:3420], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, 
ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:17.040827Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:32:17.042393Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected 2026-01-07T17:32:17.042520Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1550:3420], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:17.042559Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-07T17:32:17.042693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:32:17.042748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1566:3436] TestWaitNotification: OK eventTxId 106 2026-01-07T17:32:17.043404Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-07T17:32:17.043823Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 
72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-07T17:32:17.044561Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:17.044792Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 250us result status StatusSuccess 2026-01-07T17:32:17.045493Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TestMalformedRequest::ContentLengthCorrect [GOOD] >> ColumnBuildTest::BaseCase >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6735, MsgBus: 6041 2026-01-07T17:32:02.048196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673776451805767:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:02.048247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:02.416361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:02.419882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:02.419991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:02.434928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:02.554662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:02.716805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.716825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.716831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.716893Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.729109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:03.077140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:03.240434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:03.254373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:03.332363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.647115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.873173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.976332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.109832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673793631676701:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.109974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.110328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673793631676711:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.110371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.479083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.516262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.552963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.603917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.643282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.687797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.731570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.792326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.898475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673793631677586:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.898546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.898617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673793631677591:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.898916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673793631677593:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.898954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.902968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:06.918889Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673793631677594:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:06.977459Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673793631677646:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:07.048669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673776451805767:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:07.048735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:08.563454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 30104, MsgBus: 2863 2026-01-07T17:32:10.175442Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673812549865573:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:10.175499Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:10.190744Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:10.308740Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:10.311258Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:10.311343Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:10.329101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:10.335881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:10.452895Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:10.452918Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:10.452965Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:10.453046Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:10.888985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:10.895319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:10.912447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.993146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.145343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.207650Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:11.236146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.640906Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673825434769289:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.641037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.641401Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673825434769299:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.641455Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.697066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.726829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.761729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.803897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.851733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.912762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.968695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.026322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.132011Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673829729737467:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.132126Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.132440Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673829729737473:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.132495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673829729737472:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.132551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.137162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:14.154239Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673829729737476:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:14.218697Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673829729737527:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:15.176560Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673812549865573:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:15.176639Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:15.886936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> Yq_1::ModifyQuery [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthLower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2026-01-07T17:32:14.769180Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:14.771162Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:14.771247Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:14.772385Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:14.772873Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e18/r3tmp/tmpfleNZJ/pdisk_1.dat 2026-01-07T17:32:15.353128Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [1:485:2464] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1353:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:15.353305Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 
2026-01-07T17:32:15.353354Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.353386Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.353425Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.353456Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.353485Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.353528Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1353:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:15.353601Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG33 2026-01-07T17:32:15.353661Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG32 2026-01-07T17:32:15.353704Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG33 2026-01-07T17:32:15.353733Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG32 2026-01-07T17:32:15.353771Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG33 2026-01-07T17:32:15.353797Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG32 2026-01-07T17:32:15.353984Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:3] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.354751Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:2] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.354812Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:1] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.357648Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# 
[72057594037932033:2:8:0:0:1353:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-07T17:32:15.357865Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-07T17:32:15.357961Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-07T17:32:15.358041Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-07T17:32:15.358105Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:15.358292Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.929 sample PartId# [72057594037932033:2:8:0:0:1353:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.931 sample PartId# [72057594037932033:2:8:0:0:1353:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.931 sample PartId# [72057594037932033:2:8:0:0:1353:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.816 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.987 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.079 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-07T17:32:15.378946Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2026-01-07T17:32:15.381378Z node 1 :BS_PROXY CRIT: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2026-01-07T17:32:15.381711Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2026-01-07T17:32:15.381869Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 2026-01-07T17:32:15.888604Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.888714Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.890141Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.891562Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.892779Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.892856Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e18/r3tmp/tmpc47b58/pdisk_1.dat 2026-01-07T17:32:16.414186Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [2:487:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1341:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:16.414359Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033: ... 
stablishingSessions Marker# DSP03 2026-01-07T17:32:17.794547Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.794713Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.794795Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.794978Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.795052Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.795105Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.795154Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.795184Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-07T17:32:17.795223Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-07T17:32:17.795271Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:340: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2026-01-07T17:32:17.796159Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [d70ef3c23a1a2346] bootstrap ActorId# [3:616:2519] 
Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:17.796283Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [d70ef3c23a1a2346] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:17.796324Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [d70ef3c23a1a2346] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:17.796373Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2026-01-07T17:32:17.796408Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-07T17:32:17.796509Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:609:2512] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:17.799514Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-07T17:32:17.799631Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-07T17:32:17.799702Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:17.800275Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:32:17.800331Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-07T17:32:17.800436Z node 4 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2026-01-07T17:32:17.800855Z node 4 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ec9d/002e18/r3tmp/tmpxVW6Yp//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-01-07T17:32:17.802179Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-07T17:32:17.802233Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring 
Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:32:17.804599Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:620:2106] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.804760Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:621:2107] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.804873Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:622:2108] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.805007Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:623:2109] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.805121Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:624:2110] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.805235Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:625:2111] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.805340Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:626:2112] targetNodeId# 3 Marker# DSP01 2026-01-07T17:32:17.805365Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:32:17.806668Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.806972Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807043Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807215Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807279Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807355Z 
node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807432Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:17.807461Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-07T17:32:17.807501Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-07T17:32:17.807714Z node 4 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [4:620:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] Test command err: 2026-01-07T17:32:15.660486Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.663252Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.663359Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.664646Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.665242Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e11/r3tmp/tmpyOqzlh/pdisk_1.dat 2026-01-07T17:32:16.269731Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [1:485:2464] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1353:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:16.269917Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 
2026-01-07T17:32:16.269959Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.269995Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.270025Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.270050Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.270077Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.270114Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1353:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:16.270177Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG33 2026-01-07T17:32:16.270228Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG32 2026-01-07T17:32:16.270289Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG33 2026-01-07T17:32:16.270315Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG32 2026-01-07T17:32:16.270347Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG33 2026-01-07T17:32:16.270379Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG32 2026-01-07T17:32:16.270552Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:3] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.270614Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:2] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.270677Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:1] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.276423Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# 
[72057594037932033:2:8:0:0:1353:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-07T17:32:16.276631Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-07T17:32:16.276722Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-07T17:32:16.276802Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-07T17:32:16.276864Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:16.277069Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.132 sample PartId# [72057594037932033:2:8:0:0:1353:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.133 sample PartId# [72057594037932033:2:8:0:0:1353:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.134 sample PartId# [72057594037932033:2:8:0:0:1353:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 6.926 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 7.083 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 7.174 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-07T17:32:16.329624Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [b6b2c6548553d7a5] bootstrap ActorId# [1:531:2502] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:16.329788Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329837Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329866Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329893Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: 
[b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329921Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329947Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.329985Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:16.330081Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-07T17:32:16.330134Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-07T17:32:16.330174Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-07T17:32:16.330203Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-07T17:32:16.330236Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-07T17:32:16.330263Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-07T17:32:16.330418Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.330487Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.330531Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:54:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.339893Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.c ... 
# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-07T17:32:18.041184Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:607:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:18.044242Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd65997ea3b51537] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-07T17:32:18.044354Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd65997ea3b51537] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-07T17:32:18.044419Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd65997ea3b51537] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:18.044994Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:32:18.045035Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-07T17:32:18.045184Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2026-01-07T17:32:18.045418Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ec9d/002e11/r3tmp/tmpGcn2r2//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-01-07T17:32:18.046311Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-07T17:32:18.046363Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:32:18.048572Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:620:2106] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.048715Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:621:2107] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.048827Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:622:2108] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.049048Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:623:2109] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.049166Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:624:2110] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.049282Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:625:2111] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.049402Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# 
[3:618:2105] Create Queue# [3:626:2112] targetNodeId# 2 Marker# DSP01 2026-01-07T17:32:18.049430Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:32:18.051098Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051390Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051464Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051660Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051735Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051828Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051903Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-07T17:32:18.051934Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-07T17:32:18.051978Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-07T17:32:18.052119Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [efc53170c63234c6] bootstrap ActorId# [3:627:2113] 
Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2026-01-07T17:32:18.052190Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [efc53170c63234c6] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:2:0:0:0] node# 2 Marker# DSPB03 2026-01-07T17:32:18.052380Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:620:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 3580630381867750677 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2026-01-07T17:32:18.053532Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [efc53170c63234c6] Handle TEvVBlockResult status# OK From# [82000002:2:0:0:0] NodeId# 2 Marker# DSPB01 2026-01-07T17:32:18.053604Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [efc53170c63234c6] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2026-01-07T17:32:18.053946Z node 3 :BS_PROXY INFO: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-07T17:32:18.054204Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-07T17:32:18.054559Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [c85e1a21dcb31b54] bootstrap ActorId# [2:628:2520] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:18.054708Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [c85e1a21dcb31b54] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:18.054760Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [c85e1a21dcb31b54] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:18.054828Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [c85e1a21dcb31b54] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2026-01-07T17:32:18.054879Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [c85e1a21dcb31b54] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2026-01-07T17:32:18.055001Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:607:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:18.055228Z node 2 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:581: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-07T17:32:18.055511Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:265: [c85e1a21dcb31b54] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-07T17:32:18.055601Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [c85e1a21dcb31b54] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2026-01-07T17:32:18.055670Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:499: [c85e1a21dcb31b54] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:18.056089Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:620:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> TestMalformedRequest::CompressedDeflateContentLengthCorrect [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |83.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/nodewarden/ut/unittest |83.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] Test command err: 2026-01-07T17:32:15.755566Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.757981Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.760367Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.760509Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.762547Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.765046Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002dd0/r3tmp/tmpuZY59r/pdisk_1.dat 2026-01-07T17:32:16.419885Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [3ca1a99c83a6f037] bootstrap ActorId# [1:555:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1351:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:16.420076Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420123Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420150Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420178Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420206Z node 1 :BS_PROXY_PUT 
DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420232Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [3ca1a99c83a6f037] Id# [72057594037932033:2:8:0:0:1351:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.420272Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [3ca1a99c83a6f037] restore Id# [72057594037932033:2:8:0:0:1351:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:16.420352Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [3ca1a99c83a6f037] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1351:1] Marker# BPG33 2026-01-07T17:32:16.420393Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [3ca1a99c83a6f037] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1351:1] Marker# BPG32 2026-01-07T17:32:16.420435Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [3ca1a99c83a6f037] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1351:2] Marker# BPG33 2026-01-07T17:32:16.420464Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [3ca1a99c83a6f037] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1351:2] Marker# BPG32 2026-01-07T17:32:16.420495Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [3ca1a99c83a6f037] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1351:3] Marker# BPG33 2026-01-07T17:32:16.420520Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [3ca1a99c83a6f037] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1351:3] Marker# BPG32 2026-01-07T17:32:16.420700Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:67:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1351:3] FDS# 1351 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.420768Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1351:2] FDS# 1351 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.420815Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1351:1] FDS# 1351 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.425367Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [3ca1a99c83a6f037] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1351:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90637 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-07T17:32:16.425667Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [3ca1a99c83a6f037] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1351:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90637 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# 
BPP01 2026-01-07T17:32:16.425762Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [3ca1a99c83a6f037] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1351:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90637 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-07T17:32:16.425859Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [3ca1a99c83a6f037] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1351:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-07T17:32:16.425928Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [3ca1a99c83a6f037] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1351:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:16.426122Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.12 sample PartId# [72057594037932033:2:8:0:0:1351:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.12 sample PartId# [72057594037932033:2:8:0:0:1351:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.121 sample PartId# [72057594037932033:2:8:0:0:1351:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 5.73 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.979 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.081 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-07T17:32:16.481676Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [1a43693427d0a82b] bootstrap ActorId# [1:601:2506] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:16.481851Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.481900Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.481931Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.481963Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.481991Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.482019Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:16.482056Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [1a43693427d0a82b] restore Id# [72057594037932033:2:9:0:0:224:0] 
optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:16.482127Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-07T17:32:16.482180Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-07T17:32:16.482218Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-07T17:32:16.482245Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [1a43693427d0a82b] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-07T17:32:16.482279Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-07T17:32:16.482305Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [1a43693427d0a82b] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-07T17:32:16.482463Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.482531Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:16.482610Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: ... 
_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 234 PDiskId# 1002 2026-01-07T17:32:18.464390Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 237 PDiskId# 1002 2026-01-07T17:32:18.464426Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 240 PDiskId# 1002 2026-01-07T17:32:18.464481Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 243 PDiskId# 1002 2026-01-07T17:32:18.464522Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1002 2026-01-07T17:32:18.464559Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1002 2026-01-07T17:32:18.464595Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1002 2026-01-07T17:32:18.464644Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1002 2026-01-07T17:32:18.464683Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1002 2026-01-07T17:32:18.464720Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1002 2026-01-07T17:32:18.464755Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1002 2026-01-07T17:32:18.464790Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1002 2026-01-07T17:32:18.464827Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1002 2026-01-07T17:32:18.464863Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1002 2026-01-07T17:32:18.464922Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1002 2026-01-07T17:32:18.464978Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1002 2026-01-07T17:32:18.465020Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1002 2026-01-07T17:32:18.465059Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1002 2026-01-07T17:32:18.465097Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1002 2026-01-07T17:32:18.465139Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1002 2026-01-07T17:32:18.465183Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1002 2026-01-07T17:32:18.465228Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1002 2026-01-07T17:32:18.465274Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1002 2026-01-07T17:32:18.465310Z node 3 :BS_PDISK 
DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1002 2026-01-07T17:32:18.465352Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1002 2026-01-07T17:32:18.465388Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1002 2026-01-07T17:32:18.465437Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1002 2026-01-07T17:32:18.465481Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1002 2026-01-07T17:32:18.465543Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1002 2026-01-07T17:32:18.466479Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D6AB5DFF280 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1002 2026-01-07T17:32:18.466583Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000004 PDiskId# 1002 2026-01-07T17:32:18.466648Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2565120001348 PDiskId# 1002 2026-01-07T17:32:18.466703Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1002 2026-01-07T17:32:18.467384Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1002 2026-01-07T17:32:18.467473Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.468213Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-07T17:32:18.468302Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1540.200892 2026-01-07T17:32:18.468414Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-07T17:32:18.468527Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-07T17:32:18.468562Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.468613Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-07T17:32:18.468681Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000005 PDiskId# 1002 
2026-01-07T17:32:18.468749Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2565120001604 PDiskId# 1002 2026-01-07T17:32:18.468804Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.468922Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1540.203000 2026-01-07T17:32:18.469007Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-07T17:32:18.469038Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-07T17:32:18.469061Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.469092Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-07T17:32:18.469131Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.479021Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000006 PDiskId# 1002 2026-01-07T17:32:18.479133Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001848 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 1540.213378 2026-01-07T17:32:18.479449Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-07T17:32:18.479583Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001848 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-07T17:32:18.479635Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.479777Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 12 NumActiveSlots: 0 SlotSizeInUnits: 2 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 
2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 0 } 2026-01-07T17:32:18.489982Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.500294Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.510536Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.520824Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.531489Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.542515Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-07T17:32:18.552800Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 >> TestMalformedRequest::ContentLengthNone [GOOD] |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6101, MsgBus: 65244 2026-01-07T17:31:53.542656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673737470343544:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:53.542803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:53.572596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:53.905467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:53.905544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:53.911780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:53.964045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:54.045633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:54.046907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673737470343420:2081] 1767807113518075 != 1767807113518078 2026-01-07T17:31:54.266896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:54.282451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:54.282477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:54.282486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:54.282583Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:54.540851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:54.982666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:54.989605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:55.068437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.336117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.633923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:55.741558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.927001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754650214473:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.927144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.927730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673754650214483:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:57.927822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.280806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.317426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.351464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.384775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.423679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.487644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.521027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.537772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673737470343544:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:58.537822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:58.568248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:58.641997Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673758945182654:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.642089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.642436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673758945182659:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.642476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673758945182660:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.642765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:58.646025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:58.656572Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673758945182663:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:58.734987Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673758945182714:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... 026-01-07T17:32:10.106216Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:10.113097Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673806989031121:2081] 1767807129989436 != 1767807129989439 2026-01-07T17:32:10.128517Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:10.229546Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:10.229572Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:10.229580Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:10.229667Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:10.305218Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:10.724057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:10.730477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:10.748550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:10.834490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.008482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.021068Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:11.128384Z node 3 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.854006Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673824168902171:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.854092Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.861283Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673824168902181:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.861392Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:13.929083Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.973566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.016726Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.052399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.088872Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.143764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.183177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.263664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.356343Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673828463870350:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.356487Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.356792Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673828463870356:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.356818Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673828463870355:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.356871Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.361849Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:14.376326Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673828463870359:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:14.458326Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673828463870410:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:14.992494Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673806989031214:2133];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:14.992569Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:16.378089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.715289Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:32:16.876210Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzA1ZTU0NDMtM2QwOTA4NGYtMzYyOTJhMzEtYzliNzQ1YmE=, ActorId: [3:7592673837053805290:2523], ActorState: ExecuteState, LegacyTraceId: 01kecr8d3174v9jakd8z1dhdrm, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink >> TestMalformedRequest::CompressedGzipContentLengthNone |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] Test command err: 2026-01-07T17:32:15.139248Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.141044Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.141134Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.142132Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:15.142551Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e16/r3tmp/tmpBcNDBl/pdisk_1.dat Formatting PDisk with guid1 9186043477982739244 Creating PDisk with guid2 8789274994156284603 Creating pdisk 2026-01-07T17:32:15.727359Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:570} PDiskId# 1001 Can't start due to a guid error expected# 8789274994156284603 on-disk# 9186043477982739244 PDiskId# 1001 2026-01-07T17:32:15.770946Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:486:2464] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:361:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:32:15.771116Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771471Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771514Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771541Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771580Z node 1 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771608Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:361:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:32:15.771649Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:361:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:32:15.771744Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:361:1] Marker# BPG33 2026-01-07T17:32:15.771794Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:361:1] Marker# BPG32 2026-01-07T17:32:15.771840Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:361:2] Marker# BPG33 2026-01-07T17:32:15.771870Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:361:2] Marker# BPG32 2026-01-07T17:32:15.771900Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:361:3] Marker# BPG33 2026-01-07T17:32:15.771934Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:361:3] Marker# BPG32 2026-01-07T17:32:15.772097Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:361:3] FDS# 361 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.772163Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:361:2] FDS# 361 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.772208Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:361:1] FDS# 361 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:32:15.777166Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:361:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82842 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-07T17:32:15.777389Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:361:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82842 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 
2026-01-07T17:32:15.777476Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:361:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82842 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-07T17:32:15.777547Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:361:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-07T17:32:15.777605Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:361:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:32:15.777785Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.462 sample PartId# [72057594037932033:2:8:0:0:361:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.463 sample PartId# [72057594037932033:2:8:0:0:361:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.463 sample PartId# [72057594037932033:2:8:0:0:361:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 6.477 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.647 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.73 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } Verify that PDisk returns ERROR YardInitResult: {EvYardInitResult Status# CORRUPTED ErrorReason# "PDisk is in StateError, reason# PDiskId# 1001 Can't start due to a guid error expected# 8789274994156284603 on-disk# 9186043477982739244" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 0 ownerRound# 0 SlotSizeInUnits# 0 ChunkSize# 0 AppendBlockSize# 0 RecommendedReadSize# 0 SeekTimeUs# 0 ReadSpeedBps# 0 WriteSpeedBps# 0 ReadBlockSize# 0 WriteBlockSize# 0 BulkWriteBlockSize# 0 PrefetchSizeBytes# 0 GlueRequestDistanceBytes# 0 IsTinyDisk# 0}} OwnedChunks# {}} 2026-01-07T17:32:17.138135Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:17.138252Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:17.139752Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:17.141333Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:17.142636Z node 2 :BS_SYNCLOG WARN: 
blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:32:17.142716Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ec9d/002e16/r3tmp/tmp8ONiFM/pdisk_1.dat Starting test 2026-01-07T17:32:17.660840Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2537: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON START Marker# BSVS37 2026-01-07T17:32:17.661133Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:707: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery START 2026-01-07T17:32:17.665416Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:190: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-01-07T17:32:17.666208Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_logreplay.cpp:83: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TRecoveryLogReplayer: START 2026-01-07T17:32:17.666828Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:143: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery FINISHED: {RecoveryDuration# 0.002000s RecoveredLogStartLsn# 0 SuccessfulRecovery# true EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {{RecsN# 0 Lsns# [0 0]}}} ... 
blocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON cookie 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } PDiskId: 1000 VDiskSlotId: 1002 Guid: 5456350744084491739 Kind: 0 StoragePoolName: "testEvVGenerationChangeRace" InstanceGuid: 11806457655526628656 GroupSizeInUnits: 0 2026-01-07T17:32:17.719939Z node 2 :BS_SKELETON INFO: blobstorage_skeletonfront.cpp:1727: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) VDisk Generation Change success; new VDiskId# [82000002:2:0:0:0] Marker# BSVSF02 TEvControllerConfigResponse# NKikimrBlobStorage.TEvControllerConfigResponse Response { Status { Success: true } Success: true ConfigTxSeqNo: 5 } Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: Initial DiskSpace: Green Replicated: false UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 11806457655526628656 ReplicationProgress: nan ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 11806457655526628656 GroupSizeInUnits: 0 ... unblocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON 2026-01-07T17:32:17.720679Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1966: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON LOCAL RECOVERY SUCCEEDED Marker# BSVS29 2026-01-07T17:32:17.737956Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2134: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON SYNC GUID RECOVERY SUCCEEDED Marker# BSVS31 2026-01-07T17:32:17.738065Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1845: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON IS UP AND RUNNING Marker# BSVS28 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IncarnationGuid: 16349739802483488852 InstanceGuid: 11806457655526628656 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 3 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 11213970223398292486 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 1228621934024697151 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 16882059809704980464 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 
GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 1 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 13377627423442986411 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 11213970223398292486 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 1228621934024697151 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 16882059809704980464 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 13377627423442986411 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 11213970223398292486 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 1228621934024697151 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 16882059809704980464 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 13377627423442986411 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 10075491934428361830 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 7210365765701504807 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId 
{ GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 11806457655526628656 AvailableSize: 34225520640 GroupSizeInUnits: 2 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> ColumnBuildTest::Locking_Failed [GOOD] >> ColumnBuildTest::Locking_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] Test command err: Disable nodeId# 94 Disable nodeId# 2 Delete nodeId# 64 Pick Pick Disable nodeId# 78 Delete nodeId# 61 Enable nodeId# 78 Disable nodeId# 95 Enable nodeId# 94 Delete nodeId# 74 Pick Pick Delete nodeId# 99 Disable nodeId# 69 Add nodeId# 101 Delete nodeId# 95 Enable nodeId# 69 Disable nodeId# 73 Enable nodeId# 73 Pick Enable nodeId# 2 Delete nodeId# 77 Delete nodeId# 53 Pick Pick Delete nodeId# 65 Add nodeId# 102 Delete nodeId# 32 Disable nodeId# 5 Disable nodeId# 69 Add nodeId# 103 Add nodeId# 104 Add nodeId# 105 Disable nodeId# 28 Disable nodeId# 103 Add nodeId# 106 Delete nodeId# 78 Add nodeId# 107 Delete nodeId# 107 Disable nodeId# 90 Add nodeId# 108 Delete nodeId# 35 Disable nodeId# 45 Enable nodeId# 28 Enable nodeId# 45 Add nodeId# 109 Add nodeId# 110 Pick Add nodeId# 111 Delete nodeId# 15 Enable nodeId# 90 Delete nodeId# 75 Disable nodeId# 86 Delete nodeId# 85 Add nodeId# 112 Disable nodeId# 54 Pick Pick Disable nodeId# 22 Disable nodeId# 29 Disable nodeId# 59 Pick Disable nodeId# 56 Disable nodeId# 111 Disable nodeId# 41 Disable nodeId# 94 Add nodeId# 113 Add nodeId# 114 Delete nodeId# 84 Disable nodeId# 38 Pick Delete nodeId# 30 Disable nodeId# 109 Pick Enable nodeId# 111 Pick Pick Enable nodeId# 94 Disable nodeId# 18 Add nodeId# 115 Pick Delete nodeId# 13 Add nodeId# 116 Enable nodeId# 22 Pick Disable nodeId# 57 Delete nodeId# 40 Pick Delete nodeId# 110 Pick Delete nodeId# 83 Add nodeId# 117 Delete nodeId# 10 Add nodeId# 118 Delete nodeId# 81 Disable nodeId# 51 Disable nodeId# 11 Delete nodeId# 36 Enable nodeId# 59 Add nodeId# 119 Enable nodeId# 103 Enable nodeId# 41 Disable nodeId# 96 Disable nodeId# 102 Add nodeId# 120 Enable nodeId# 51 Pick Delete nodeId# 8 Disable nodeId# 4 Delete nodeId# 69 Delete nodeId# 2 Pick Delete nodeId# 58 Pick Disable nodeId# 115 Add nodeId# 121 Enable nodeId# 4 Enable nodeId# 54 Delete nodeId# 112 Enable nodeId# 86 Enable nodeId# 102 Pick Delete nodeId# 118 Enable nodeId# 18 Delete nodeId# 91 Add nodeId# 122 Enable nodeId# 5 Delete nodeId# 59 Enable nodeId# 29 Enable nodeId# 11 Delete nodeId# 51 Enable nodeId# 56 Delete nodeId# 11 Enable nodeId# 115 Add nodeId# 123 Add nodeId# 124 Enable nodeId# 109 Delete nodeId# 12 Disable nodeId# 63 Disable nodeId# 22 Pick Enable nodeId# 96 Pick Delete nodeId# 60 Disable nodeId# 67 Delete nodeId# 9 Pick Delete nodeId# 25 Disable nodeId# 56 Disable nodeId# 46 Pick Delete nodeId# 38 Add nodeId# 125 Delete nodeId# 54 Disable nodeId# 109 Disable nodeId# 116 Delete nodeId# 70 Enable nodeId# 116 Pick Disable nodeId# 33 Disable nodeId# 31 Delete nodeId# 117 Enable nodeId# 33 Enable nodeId# 109 Enable nodeId# 22 Enable nodeId# 57 Pick Delete nodeId# 27 Pick Pick Add nodeId# 126 Add nodeId# 127 Add nodeId# 128 Disable nodeId# 68 Delete nodeId# 42 Delete nodeId# 123 Add nodeId# 129 Pick Add nodeId# 130 Enable nodeId# 63 Delete nodeId# 29 Enable nodeId# 31 Add nodeId# 131 
Delete nodeId# 55 Delete nodeId# 88 Enable nodeId# 46 Pick Add nodeId# 132 Disable nodeId# 127 Pick Pick Pick Enable nodeId# 68 Disable nodeId# 46 Pick Add nodeId# 133 Pick Enable nodeId# 67 Delete nodeId# 56 Enable nodeId# 127 Enable nodeId# 46 Pick Add nodeId# 134 Delete nodeId# 134 Delete nodeId# 76 Pick Disable nodeId# 52 Add nodeId# 135 Pick Delete nodeId# 46 Pick Enable nodeId# 52 Add nodeId# 136 Add nodeId# 137 Disable nodeId# 71 Disable nodeId# 122 Disable nodeId# 67 Enable nodeId# 71 Disable nodeId# 39 Pick Enable nodeId# 67 Delete nodeId# 97 Enable nodeId# 39 Add nodeId# 138 Pick Enable nodeId# 122 Pick Add nodeId# 139 Delete nodeId# 122 Disable nodeId# 120 Delete nodeId# 67 Add nodeId# 140 Disable nodeId# 1 Delete nodeId# 108 Add nodeId# 141 Pick Delete nodeId# 127 Disable nodeId# 7 Delete nodeId# 113 Enable nodeId# 7 Disable nodeId# 138 Disable nodeId# 14 Delete nodeId# 73 Pick Disable nodeId# 126 Delete nodeId# 23 Disable nodeId# 137 Add nodeId# 142 Disable nodeId# 103 Enable nodeId# 14 Enable nodeId# 103 Enable nodeId# 138 Pick Disable nodeId# 129 Add nodeId# 143 Pick Pick Disable nodeId# 119 Add nodeId# 144 Pick Delete nodeId# 66 Disable nodeId# 6 Disable nodeId# 136 Enable nodeId# 6 Add nodeId# 145 Add nodeId# 146 Enable nodeId# 126 Enable nodeId# 120 Enable nodeId# 1 Pick Disable nodeId# 26 Pick Add nodeId# 147 Delete nodeId# 14 Delete nodeId# 57 Disable nodeId# 16 Disable nodeId# 5 Enable nodeId# 129 Pick Add nodeId# 148 Disable nodeId# 20 Enable nodeId# 20 Add nodeId# 149 Disable nodeId# 129 Add nodeId# 150 Disable nodeId# 103 Add nodeId# 151 Disable nodeId# 100 Disable nodeId# 37 Disable nodeId# 145 Delete nodeId# 19 Disable nodeId# 138 Disable nodeId# 82 Add nodeId# 152 Disable nodeId# 135 Enable nodeId# 138 Delete nodeId# 45 Add nodeId# 153 Pick Enable nodeId# 136 Delete nodeId# 79 Add nodeId# 154 Disable nodeId# 49 Add nodeId# 155 Pick Add nodeId# 156 Delete nodeId# 111 Pick Add nodeId# 157 Add nodeId# 158 Pick Add nodeId# 159 Delete nodeId# 5 Enable nodeId# 137 Disable nodeId# 47 Add nodeId# 160 Pick Add nodeId# 161 Delete nodeId# 31 Delete nodeId# 96 Add nodeId# 162 Disable nodeId# 50 Enable nodeId# 119 Add nodeId# 163 Disable nodeId# 17 Disable nodeId# 160 Delete nodeId# 104 Enable nodeId# 17 Delete nodeId# 4 Pick Enable nodeId# 135 Enable nodeId# 26 Enable nodeId# 100 Pick Pick Disable nodeId# 33 Add nodeId# 164 Add nodeId# 165 Pick Disable nodeId# 128 Pick Add nodeId# 166 Delete nodeId# 100 Pick Disable nodeId# 121 Delete nodeId# 128 Disable nodeId# 28 Enable nodeId# 37 Add nodeId# 167 Delete nodeId# 131 Pick Disable nodeId# 146 Delete nodeId# 101 Pick Enable nodeId# 145 Pick Pick Delete nodeId# 154 Pick Add nodeId# 168 Disable nodeId# 167 Add nodeId# 169 Pick Delete nodeId# 90 Disable nodeId# 34 Enable nodeId# 129 Add nodeId# 170 Enable nodeId# 50 Disable nodeId# 1 Disable nodeId# 152 Delete nodeId# 26 Disable nodeId# 145 Disable nodeId# 17 Add nodeId# 171 Delete nodeId# 121 Add nodeId# 172 Enable nodeId# 33 Add nodeId# 173 Enable nodeId# 1 Add nodeId# 174 Enable nodeId# 34 Pick Add nodeId# 175 Pick Disable nodeId# 141 Disable nodeId# 140 Add nodeId# 176 Add nodeId# 177 Enable nodeId# 17 Enable nodeId# 160 Disable nodeId# 136 Enable nodeId# 28 Delete nodeId# 82 Pick Enable nodeId# 103 Pick Disable nodeId# 132 Pick Enable nodeId# 141 Disable nodeId# 48 Enable nodeId# 167 Pick Add nodeId# 178 Add nodeId# 179 Add nodeId# 180 Pick Delete nodeId# 39 Disable nodeId# 119 Enable nodeId# 145 Pick Disable nodeId# 138 Delete nodeId# 178 Enable nodeId# 119 Delete nodeId# 
62 Enable nodeId# 48 Pick Disable nodeId# 1 Enable nodeId# 1 Pick Pick Pick Pick Pick Add nodeId# 181 Add nodeId# 182 Enable nodeId# 16 Enable nodeId# 146 Enable nodeId# 138 Add nodeId# 183 Add nodeId# 184 Pick Enable nodeId# 49 Enable nodeId# 140 Pick Pick Disable nodeId# 151 Disable nodeId# 139 Enable nodeId# 151 Enable nodeId# 139 Enable nodeId# 132 Enable nodeId# 152 Pick Disable nodeId# 22 Enable nodeId# 47 Pick Disable nodeId# 125 Enable nodeId# 136 Delete nodeId# 148 Add nodeId# 185 Delete nodeId# 103 Disable nodeId# 89 Pick Pick Delete nodeId# 89 Delete nodeId# 86 Enable nodeId# 22 Delete nodeId# 48 Pick Delete nodeId# 142 Pick Enable nodeId# 125 Delete nodeId# 164 Add nodeId# 186 Delete nodeId# 151 Delete nodeId# 182 Pick Add nodeId# 187 Add nodeId# 188 Pick Disable nodeId# 71 Pick Pick Pick Add nodeId# 189 Pick Delete nodeId# 63 Pick Disable nodeId# 72 Disable nodeId# 34 Add nodeId# 190 Enable nodeId# 71 Pick Enable nodeId# 34 Enable nodeId# 72 Delete nodeId# 21 Add nodeId# 191 Pick Delete nodeId# 120 Add nodeId# 192 Delete nodeId# 132 Add nodeId# 193 Disable nodeId# 150 Pick Delete nodeId# 133 Delete nodeId# 7 Pick Disable nodeId# 163 Add nodeId# 194 Enable nodeId# 150 Add nodeId# 195 Pick Enable nodeId# 163 Pick Add nodeId# 196 Pick Delete nodeId# 147 Pick Disable nodeId# 169 Disable nodeId# 34 Pick Delete nodeId# 188 Enable nodeId# 169 Enable nodeId# 34 Pick Disable nodeId# 140 Delete nodeId# 191 Disable nodeId# 149 Delete nodeId# 141 Enable nodeId# 149 Pick Enable nodeId# 140 Delete nodeId# 195 Add nodeId# 197 Disable nodeId# 160 Delete nodeId# 68 Enable nodeId# 160 Pick Disable nodeId# 163 Pick Enable nodeId# 163 Add nodeId# 198 Pick Add nodeId# 199 Delete nodeId# 172 Add nodeId# 200 Delete nodeId# 179 Disable nodeId# 158 Enable nodeId# 158 Disable nodeId# 176 Enable nodeId# 176 Add nodeId# 201 Pick Add nodeId# 202 Delete nodeId# 22 Delete nodeId# 28 Add nodeId# 203 Pick Add nodeId# 204 Pick Delete nodeId# 167 Pick Pick Pick Pick Pick Disable nodeId# 49 Enable nodeId# 49 Disable nodeId# 126 Disable nodeId# 114 Pick Pick Delete nodeId# 187 Disable nodeId# 71 Delete nodeId# 161 Add nodeId# 205 Delete nodeId# 171 Delete nodeId# 184 Enable nodeId# 114 Delete nodeId# 198 Add nodeId# 206 Enable nodeId# 126 Pick Delete nodeId# 158 Add nodeId# 207 Delete nodeId# 169 Pick Delete nodeId# 50 Delete nodeId# 149 Pick Add nodeId# 208 Delete nodeId# 49 Disable nodeId# 176 Pick Delete nodeId# 192 Add nodeId# 209 Enable nodeId# 176 Delete nodeId# 33 Pick Add nodeId# 210 Enable nodeId# 71 Delete nodeId# 150 Disable nodeId# 6 Add nodeId# 211 Delete nodeId# 24 Delete nodeId# 155 Pick Disable nodeId# 140 Disable nodeId# 114 Enable nodeId# 114 Enable nodeId# 140 Add nodeId# 212 Enable nodeId# 6 Pick Delete nodeId# 44 Pick Delete nodeId# 17 Disable nodeId# 175 Disable nodeId# 52 Enable nodeId# 52 Add nodeId# 213 Pick Enable nodeId# 175 Disable nodeId# 174 Enable nodeId# 174 Delete nodeId# 204 Delete nodeId# 20 Add nodeId# 214 Delete nodeId# 156 Add nodeId# 215 Disable nodeId# 126 Enable nodeId# 126 Disable nodeId# 180 Delete nodeId# 145 Pick Enable nodeId# 180 Add nodeId# 216 Add nodeId# 217 Add nodeId# 218 Add nodeId# 219 Delete nodeId# 116 Delete nodeId# 16 Pick Disable nodeId# 71 Enable nodeId# 71 Add nodeId# 220 Pick Disable nodeId# 129 Delete nodeId# 162 Add nodeId# 221 Delete nodeId# 201 Add nodeId# 222 Add nodeId# 223 Enable nodeId# 129 Pick Pick Add nodeId# 224 Add nodeId# 225 Delete nodeId# 220 Delete nodeId# 166 Add nodeId# 226 Pick Add nodeId# 227 Disable nodeId# 181 Pick Pick Disable 
nodeId# 175 Enable nodeId# 175 Pick Delete nodeId# 221 Add nodeId# 228 Delete nodeId# 139 Add nodeId# 229 Delete nodeId# 92 Disable nodeId# 136 Add nodeId# 230 Enable nodeId# 136 Pick Pick Delete nodeId# 223 Disable nodeId# 3 Disable nodeId# 206 Disable nodeId# 137 Enable nodeId# 3 Enable nodeId# 181 Pick Delete nodeId# 173 Delete nodeId# 206 Enable nodeId# 137 Disable nodeId# 217 Pick Enable nodeId# 217 Add nodeId# 231 Add nodeId# 232 Pick Pick Add nodeId# 233 Add nodeId# 234 Disable nodeId# 189 Pick Pick Add nodeId# 235 Disable nodeId# 115 Enable nodeId# 189 Add nodeId# 236 Disable nodeId# 159 Delete nodeId# 225 Disable nodeId# 185 Add nodeId# 237 Disable nodeId# 41 Add nodeId# 238 Pick Add nodeId# 239 Enable nodeId# 41 Disable nodeId# 211 Disable nodeId# 6 Pick Disable nodeId# 165 Pick Pick Pick Enable nodeId# 159 Disable nodeId# 235 Add nodeId# 240 Enable nodeId# 115 Disable nodeId# 105 Disable nodeId# 138 Delete nodeId# 175 Disable nodeId# 137 Disable nodeId# 163 Enable nodeId# 138 Delete nodeId# 137 Enable nodeId# 235 Disable nodeId# 72 Add nodeId# 241 Add nodeId# 242 Enable nodeId# 211 Enable nodeId# 185 Enable nodeId# 6 Delete nodeId# 168 Enable nodeId# 105 Pick Disable nodeId# 183 Add nodeId# 243 Delete nodeId# 136 Disable nodeId# 181 Disable nodeId# 157 Enable nodeId# 165 Disable nodeId# 41 Disable nodeId# 98 Disable nodeId# 52 Delete nodeId# 227 Disable nodeId# 124 Pick Add nodeId# 244 Delete nodeId# 224 Pick Disable nodeId# 230 Add nodeId# 245 Disable nodeId# 129 Disable nodeId# 190 Delete nodeId# 197 Pick Enable nodeId# 41 Delete nodeId# 129 Enable nodeId# 181 Disable nodeId# 214 Pick Delete nodeId# 217 Enable nodeId# 124 Enable nodeId# 72 Pick Add nodeId# 246 Pick Add nodeId# 247 Disable nodeId# 105 Delete nodeId# 212 Add nodeId# 248 Pick Enable nodeId# 230 Delete nodeId# 140 Disable nodeId# 94 Add nodeId# 249 Pick Add nodeId# 250 Pick Pick Enable nodeId# 105 Pick Add nodeId# 251 Add nodeId# 252 Enable nodeId# 190 Enable nodeId# 98 Delete nodeId# 194 Add nodeI ... 
eId# 20254 Delete nodeId# 20138 Pick Enable nodeId# 20239 Pick Disable nodeId# 20199 Add nodeId# 20271 Enable nodeId# 20224 Enable nodeId# 19996 Pick Pick Enable nodeId# 20254 Enable nodeId# 20199 Disable nodeId# 20163 Enable nodeId# 20163 Add nodeId# 20272 Add nodeId# 20273 Add nodeId# 20274 Disable nodeId# 20266 Pick Pick Pick Add nodeId# 20275 Disable nodeId# 20203 Delete nodeId# 20203 Disable nodeId# 20206 Enable nodeId# 20266 Delete nodeId# 20266 Disable nodeId# 19775 Delete nodeId# 20255 Add nodeId# 20276 Enable nodeId# 20206 Pick Disable nodeId# 20274 Enable nodeId# 19775 Enable nodeId# 20274 Pick Pick Add nodeId# 20277 Disable nodeId# 20260 Add nodeId# 20278 Add nodeId# 20279 Delete nodeId# 20258 Delete nodeId# 20240 Add nodeId# 20280 Disable nodeId# 20279 Enable nodeId# 20279 Disable nodeId# 20271 Disable nodeId# 20095 Disable nodeId# 20242 Disable nodeId# 20206 Disable nodeId# 20263 Enable nodeId# 20263 Delete nodeId# 20060 Delete nodeId# 20137 Enable nodeId# 20242 Add nodeId# 20281 Enable nodeId# 20206 Enable nodeId# 20095 Disable nodeId# 20175 Delete nodeId# 20253 Disable nodeId# 20126 Enable nodeId# 20271 Delete nodeId# 20175 Enable nodeId# 20260 Delete nodeId# 20126 Delete nodeId# 20158 Delete nodeId# 20273 Disable nodeId# 20206 Enable nodeId# 20206 Pick Delete nodeId# 19996 Pick Pick Delete nodeId# 20230 Disable nodeId# 20155 Enable nodeId# 20155 Delete nodeId# 20264 Disable nodeId# 20202 Enable nodeId# 20202 Add nodeId# 20282 Pick Disable nodeId# 20122 Add nodeId# 20283 Pick Delete nodeId# 20140 Enable nodeId# 20122 Disable nodeId# 19852 Delete nodeId# 20191 Pick Enable nodeId# 19852 Disable nodeId# 20272 Enable nodeId# 20272 Add nodeId# 20284 Delete nodeId# 20238 Delete nodeId# 19997 Pick Delete nodeId# 20277 Disable nodeId# 20245 Pick Add nodeId# 20285 Enable nodeId# 20245 Disable nodeId# 20206 Delete nodeId# 20202 Pick Enable nodeId# 20206 Delete nodeId# 20247 Add nodeId# 20286 Pick Disable nodeId# 20265 Add nodeId# 20287 Pick Delete nodeId# 20090 Delete nodeId# 20236 Enable nodeId# 20265 Pick Disable nodeId# 20122 Pick Delete nodeId# 20097 Add nodeId# 20288 Enable nodeId# 20122 Delete nodeId# 20252 Add nodeId# 20289 Pick Pick Delete nodeId# 20085 Pick Disable nodeId# 20283 Enable nodeId# 20283 Pick Delete nodeId# 20254 Delete nodeId# 20244 Disable nodeId# 20239 Pick Enable nodeId# 20239 Add nodeId# 20290 Add nodeId# 20291 Pick Disable nodeId# 20285 Disable nodeId# 20276 Pick Enable nodeId# 20285 Delete nodeId# 20184 Delete nodeId# 20268 Add nodeId# 20292 Pick Pick Enable nodeId# 20276 Disable nodeId# 20278 Disable nodeId# 20289 Pick Disable nodeId# 20095 Add nodeId# 20293 Delete nodeId# 20292 Enable nodeId# 20289 Pick Disable nodeId# 20242 Pick Enable nodeId# 20278 Pick Enable nodeId# 20095 Enable nodeId# 20242 Delete nodeId# 20281 Add nodeId# 20294 Delete nodeId# 20250 Disable nodeId# 20141 Disable nodeId# 20012 Add nodeId# 20295 Delete nodeId# 20222 Pick Pick Pick Pick Add nodeId# 20296 Pick Enable nodeId# 20141 Pick Delete nodeId# 20179 Enable nodeId# 20012 Delete nodeId# 20032 Disable nodeId# 20199 Enable nodeId# 20199 Disable nodeId# 20220 Pick Pick Enable nodeId# 20220 Disable nodeId# 20263 Disable nodeId# 20289 Pick Pick Pick Pick Pick Add nodeId# 20297 Delete nodeId# 20287 Enable nodeId# 20263 Add nodeId# 20298 Pick Add nodeId# 20299 Delete nodeId# 20246 Disable nodeId# 20220 Add nodeId# 20300 Pick Enable nodeId# 20220 Pick Pick Disable nodeId# 20285 Enable nodeId# 20285 Pick Disable nodeId# 20095 Pick Pick Delete nodeId# 20249 Disable nodeId# 20059 Pick Disable 
nodeId# 20265 Disable nodeId# 20239 Enable nodeId# 20095 Enable nodeId# 20265 Add nodeId# 20301 Delete nodeId# 20094 Add nodeId# 20302 Delete nodeId# 20024 Enable nodeId# 20059 Disable nodeId# 20294 Pick Disable nodeId# 20275 Disable nodeId# 20245 Add nodeId# 20303 Delete nodeId# 20245 Enable nodeId# 20239 Add nodeId# 20304 Add nodeId# 20305 Delete nodeId# 20299 Disable nodeId# 20296 Add nodeId# 20306 Delete nodeId# 20269 Add nodeId# 20307 Delete nodeId# 20303 Disable nodeId# 20291 Disable nodeId# 20284 Add nodeId# 20308 Add nodeId# 20309 Add nodeId# 20310 Add nodeId# 20311 Add nodeId# 20312 Disable nodeId# 20224 Enable nodeId# 20291 Disable nodeId# 20267 Add nodeId# 20313 Disable nodeId# 20288 Add nodeId# 20314 Add nodeId# 20315 Delete nodeId# 19976 Add nodeId# 20316 Disable nodeId# 20243 Delete nodeId# 20265 Pick Add nodeId# 20317 Delete nodeId# 20206 Delete nodeId# 20317 Add nodeId# 20318 Enable nodeId# 20275 Add nodeId# 20319 Delete nodeId# 20144 Add nodeId# 20320 Pick Pick Pick Pick Enable nodeId# 20243 Disable nodeId# 20081 Delete nodeId# 20163 Add nodeId# 20321 Disable nodeId# 20200 Delete nodeId# 20282 Add nodeId# 20322 Enable nodeId# 20200 Add nodeId# 20323 Enable nodeId# 20224 Pick Disable nodeId# 20315 Pick Delete nodeId# 20313 Enable nodeId# 20284 Delete nodeId# 20279 Disable nodeId# 20248 Add nodeId# 20324 Enable nodeId# 20289 Pick Delete nodeId# 20319 Delete nodeId# 20216 Add nodeId# 20325 Enable nodeId# 20315 Add nodeId# 20326 Disable nodeId# 20318 Enable nodeId# 20248 Delete nodeId# 20270 Add nodeId# 20327 Disable nodeId# 20325 Add nodeId# 20328 Pick Delete nodeId# 20275 Add nodeId# 20329 Disable nodeId# 20315 Disable nodeId# 20302 Pick Enable nodeId# 20294 Enable nodeId# 20302 Enable nodeId# 20318 Delete nodeId# 20271 Add nodeId# 20330 Disable nodeId# 20218 Add nodeId# 20331 Pick Enable nodeId# 20296 Delete nodeId# 20171 Add nodeId# 20332 Delete nodeId# 19834 Pick Enable nodeId# 20325 Add nodeId# 20333 Disable nodeId# 20311 Disable nodeId# 20314 Disable nodeId# 20215 Disable nodeId# 20209 Delete nodeId# 20305 Enable nodeId# 20215 Add nodeId# 20334 Add nodeId# 20335 Add nodeId# 20336 Disable nodeId# 20336 Pick Delete nodeId# 20302 Disable nodeId# 20276 Pick Enable nodeId# 20288 Enable nodeId# 20267 Disable nodeId# 20147 Pick Pick Pick Pick Add nodeId# 20337 Delete nodeId# 20200 Delete nodeId# 20237 Pick Pick Add nodeId# 20338 Enable nodeId# 20315 Disable nodeId# 20290 Add nodeId# 20339 Add nodeId# 20340 Enable nodeId# 20276 Pick Add nodeId# 20341 Disable nodeId# 20239 Add nodeId# 20342 Pick Pick Add nodeId# 20343 Add nodeId# 20344 Delete nodeId# 20339 Delete nodeId# 20327 Pick Enable nodeId# 20147 Enable nodeId# 20336 Enable nodeId# 20311 Delete nodeId# 20262 Disable nodeId# 20155 Disable nodeId# 20201 Add nodeId# 20345 Delete nodeId# 20288 Pick Enable nodeId# 20239 Add nodeId# 20346 Disable nodeId# 20328 Add nodeId# 20347 Add nodeId# 20348 Enable nodeId# 20328 Pick Enable nodeId# 20290 Disable nodeId# 20306 Delete nodeId# 20340 Disable nodeId# 20337 Add nodeId# 20349 Enable nodeId# 20209 Disable nodeId# 20012 Disable nodeId# 20343 Add nodeId# 20350 Enable nodeId# 20337 Disable nodeId# 20332 Add nodeId# 20351 Enable nodeId# 20314 Add nodeId# 20352 Delete nodeId# 20242 Disable nodeId# 20300 Disable nodeId# 20186 Disable nodeId# 20311 Enable nodeId# 20343 Disable nodeId# 20345 Add nodeId# 20353 Pick Delete nodeId# 20315 Delete nodeId# 20142 Delete nodeId# 20309 Disable nodeId# 20353 Enable nodeId# 20300 Enable nodeId# 20081 Disable nodeId# 20304 Add nodeId# 20354 Add nodeId# 
20355 Pick Delete nodeId# 19775 Enable nodeId# 20306 Pick Disable nodeId# 20348 Add nodeId# 20356 Pick Add nodeId# 20357 Delete nodeId# 20311 Pick Add nodeId# 20358 Pick Enable nodeId# 20332 Pick Disable nodeId# 20285 Disable nodeId# 20147 Pick Enable nodeId# 20345 Pick Disable nodeId# 20328 Delete nodeId# 20285 Enable nodeId# 20348 Enable nodeId# 20147 Enable nodeId# 20353 Enable nodeId# 20218 Disable nodeId# 20348 Disable nodeId# 20084 Delete nodeId# 20344 Enable nodeId# 20348 Enable nodeId# 20186 Disable nodeId# 20233 Enable nodeId# 20201 Enable nodeId# 20012 Pick Disable nodeId# 20322 Enable nodeId# 20304 Enable nodeId# 20322 Enable nodeId# 20233 Enable nodeId# 20328 Disable nodeId# 20296 Enable nodeId# 20084 Disable nodeId# 20316 Delete nodeId# 20296 Pick Disable nodeId# 20308 Enable nodeId# 20316 Delete nodeId# 20346 Pick Add nodeId# 20359 Disable nodeId# 20260 Delete nodeId# 20316 Delete nodeId# 20352 Delete nodeId# 20084 Add nodeId# 20360 Disable nodeId# 20351 Add nodeId# 20361 Disable nodeId# 20297 Disable nodeId# 20274 Disable nodeId# 20312 Pick Enable nodeId# 20312 Enable nodeId# 20155 Disable nodeId# 20293 Pick Disable nodeId# 20357 Disable nodeId# 20223 Disable nodeId# 20301 Pick Enable nodeId# 20301 Disable nodeId# 20306 Enable nodeId# 20308 Delete nodeId# 20335 Disable nodeId# 20300 Delete nodeId# 20012 Pick Delete nodeId# 20333 Enable nodeId# 20357 Disable nodeId# 20289 Disable nodeId# 20239 Pick Pick Add nodeId# 20362 Pick Enable nodeId# 20300 Delete nodeId# 20356 Enable nodeId# 20306 Enable nodeId# 20297 Pick Delete nodeId# 20283 Pick Disable nodeId# 20332 Add nodeId# 20363 Disable nodeId# 20343 Disable nodeId# 20199 Pick Add nodeId# 20364 Enable nodeId# 20293 Pick Delete nodeId# 20312 Delete nodeId# 20290 Disable nodeId# 20324 Add nodeId# 20365 Pick Delete nodeId# 20221 Pick Delete nodeId# 20359 Enable nodeId# 20343 Disable nodeId# 20350 Delete nodeId# 20324 Pick Disable nodeId# 20320 Add nodeId# 20366 Enable nodeId# 20350 Delete nodeId# 20337 Disable nodeId# 20364 Add nodeId# 20367 Enable nodeId# 20223 Pick Delete nodeId# 20307 Delete nodeId# 20284 Delete nodeId# 20355 Disable nodeId# 20272 Pick Pick Pick Enable nodeId# 20364 Enable nodeId# 20239 Enable nodeId# 20289 Disable nodeId# 20362 Disable nodeId# 20361 Add nodeId# 20368 Pick Enable nodeId# 20272 Enable nodeId# 20260 Pick Pick Delete nodeId# 20295 Enable nodeId# 20274 Enable nodeId# 20320 Disable nodeId# 20095 Delete nodeId# 19852 Enable nodeId# 20362 Enable nodeId# 20095 Add nodeId# 20369 Add nodeId# 20370 Delete nodeId# 20366 Delete nodeId# 20330 Add nodeId# 20371 Pick Pick Disable nodeId# 20334 Delete nodeId# 20358 Delete nodeId# 20095 Add nodeId# 20372 Enable nodeId# 20332 Add nodeId# 20373 Disable nodeId# 20321 Pick Enable nodeId# 20361 Enable nodeId# 20321 Add nodeId# 20374 Disable nodeId# 20370 Disable nodeId# 20241 Disable nodeId# 20233 Add nodeId# 20375 Add nodeId# 20376 Pick Enable nodeId# 20241 Delete nodeId# 20209 Add nodeId# 20377 Add nodeId# 20378 Delete nodeId# 20345 Pick Add nodeId# 20379 Delete nodeId# 20298 Enable nodeId# 20370 Disable nodeId# 20326 Add nodeId# 20380 Add nodeId# 20381 Enable nodeId# 20334 Disable nodeId# 20297 Pick Pick Enable nodeId# 20233 Delete nodeId# 20369 Enable nodeId# 20326 Pick Disable nodeId# 20373 Pick Disable nodeId# 20326 Enable nodeId# 20199 Add nodeId# 20382 Delete nodeId# 20294 Disable nodeId# 20232 Pick Delete nodeId# 20301 Enable nodeId# 20373 Disable nodeId# 20378 Add nodeId# 20383 Add nodeId# 20384 Disable nodeId# 20261 Disable nodeId# 20199 Enable nodeId# 
20261 Delete nodeId# 20201 Disable nodeId# 20169 Add nodeId# 20385 Delete nodeId# 20329 Delete nodeId# 20342 Delete nodeId# 20141 Delete nodeId# 20365 Delete nodeId# 20338 Delete nodeId# 20381 Add nodeId# 20386 Enable nodeId# 20351 Pick Enable nodeId# 20199 Add nodeId# 20387 Add nodeId# 20388 Disable nodeId# 20218 Disable nodeId# 20272 Enable nodeId# 20218 Delete nodeId# 20373 Enable nodeId# 20232 Add nodeId# 20389 Enable nodeId# 20326 Add nodeId# 20390 Disable nodeId# 20289 Disable nodeId# 20081 Enable nodeId# 20297 Delete nodeId# 20382 Pick Disable nodeId# 20351 Add nodeId# 20391 Delete nodeId# 20375 Enable nodeId# 20351 Delete nodeId# 20155 Pick Add nodeId# 20392 Enable nodeId# 20272 Disable nodeId# 20314 Delete nodeId# 20380 Delete nodeId# 20291 Delete nodeId# 20320 Delete nodeId# 20280 Delete nodeId# 20278 Disable nodeId# 20363 Disable nodeId# 20224 Enable nodeId# 20224 Disable nodeId# 20059 Disable nodeId# 20362 Pick Delete nodeId# 20392 Disable nodeId# 20354 Enable nodeId# 20354 Disable nodeId# 20377 Add nodeId# 20393 Enable nodeId# 20169 Add nodeId# 20394 Enable nodeId# 20363 Delete nodeId# 20341 Disable nodeId# 20257 Delete nodeId# 20297 Enable nodeId# 20081 Enable nodeId# 20362 Enable nodeId# 20377 Disable nodeId# 20276 Enable nodeId# 20314 Pick Delete nodeId# 20387 Enable nodeId# 20059 Enable nodeId# 20378 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpOverload::OltpOverloaded+Distributed [GOOD] >> KqpImmediateEffects::UpsertAfterInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2026-01-07T17:31:09.651660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673551153067755:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:09.660090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0107 17:31:10.160477302 65739 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:10.160727881 65739 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:10.259756Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.259893Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.259964Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.281269Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355456Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355604Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355747Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355809Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355840Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355865Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.355925Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.359459Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.363414Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.379874Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.421479Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:15072 2026-01-07T17:31:10.426266Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.457062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:10.475405Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.483737Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.500255Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.510753Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.538349Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.539895Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.541143Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.597960Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.598037Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.598088Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.601847Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.718666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:10.785187Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:15072: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:15072 } ] 2026-01-07T17:31:10.900187Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; ... etDistributedCommit; txId=281474976715775 2026-01-07T17:32:17.283525Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3719: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 3 2026-01-07T17:32:17.283677Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592673839644829074:2527] TxId: 281474976715775. Ctx: { TraceId: 01kecr8dfge20g6hv43dskfgz4, Database: , SessionId: ydb://session/3?node_id=4&id=NmZlMjliYjctZjg3NjEyNzgtZjk5ZGZjMzgtMzhkMWM4MTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-07T17:32:17.283785Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3796: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Got transaction status, status: 16 2026-01-07T17:32:17.283916Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NmZlMjliYjctZjg3NjEyNzgtZjk5ZGZjMzgtMzhkMWM4MTc=, ActorId: [4:7592673749450512577:2527], ActorState: ExecuteState, LegacyTraceId: 01kecr8dfge20g6hv43dskfgz4, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-01-07T17:32:17.284369Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3796: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Got transaction status, status: 17 2026-01-07T17:32:17.284416Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3550: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Start rollback 2026-01-07T17:32:17.284514Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037892, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037892 Generation: 1 Counter: 13 SchemeShard: 72057594046644480 PathId: 51, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.284560Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037896, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037896 Generation: 1 Counter: 11 SchemeShard: 72057594046644480 PathId: 58, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.284590Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037890, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037890 Generation: 1 Counter: 13 SchemeShard: 72057594046644480 PathId: 54, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.285284Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:601: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:20898 } ], Query: --!syntax_v1 -- Query name: HardPingTask(write) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $job_id as String; DECLARE $job as String; DECLARE $query as String; DECLARE $status as Int64; DECLARE $internal as String; DECLARE $result_id as String; DECLARE $query_id as String; DECLARE $now as Timestamp; DECLARE $ttl as Timestamp; DECLARE $retry_counter_update_time as Timestamp; DECLARE $retry_rate as Double; DECLARE $retry_counter as Uint64; DECLARE $owner as String; UPDATE `pending_small` SET `last_seen_at` = $now, `assigned_until` = $ttl, `retry_counter` = $retry_counter, `retry_counter_updated_at` = $retry_counter_update_time, `retry_rate` = $retry_rate, `owner` = $owner WHERE `tenant` = $tenant AND `scope` = $scope AND `query_id` = $query_id; UPSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`) VALUES($scope, $query_id, $job_id, $job); UPDATE `queries` SET `query` = $query, `status` = $status, `internal` = $internal, `result_sets_expire_at` = NULL, `expire_at` = NULL, `meta_revision` = `meta_revision` + 1 WHERE `scope` = $scope AND `query_id` = $query_id; 2026-01-07T17:32:17.287682Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4207: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Recv EvWriteResult (external) from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=61, Locks= , Cookie=1 2026-01-07T17:32:17.288015Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4207: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Recv EvWriteResult (external) from ShardID=72075186224037892, Status=STATUS_COMPLETED, TxId=54, Locks= , Cookie=1 2026-01-07T17:32:17.289054Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudo61a7crgjdjpbu3, Owner: 1ab9e862-6dbf9f30-b706bc3a-81c598db18 Ping response error: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint [::]:20898 } ]. Retry after: 0.000000s 2026-01-07T17:32:17.290166Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: PingTaskRequest - PingTaskResult: {owner_id: "1ab9e862-6dbf9f30-b706bc3a-81c598db18" query_id { value: "utqudo61a7crgjdjpbu3" } ast: "(\n(let $1 (Configure! world (DataSource \'\"config\") \'\"DqEngine\" \'\"force\"))\n(let $2 (DataSink \'result))\n(let $3 \'(\'(\'type) \'(\'autoref) \'(\'columns \'(\'\"column0\"))))\n(let $4 (ResFill! $1 $2 (Key) (AsList (AsStruct \'(\'\"column0\" (Int32 \'\"1\")))) $3 \'\"dq\"))\n(return (Commit! $4 $2))\n)\n" plan: "{\"Detailed\":{\"Operations\":[{\"Id\":2,\"Name\":\"ResFill!\"}],\"OperationRoot\":2,\"Providers\":[],\"OperationStats\":{\"ResFill!\":1}},\"Basic\":{\"nodes\":[{\"id\":2,\"level\":1,\"name\":\"ResFill!\",\"type\":\"op\"}],\"links\":[]}}" scope: "yandexcloud://Execute_folder_id" deadline { seconds: 1767893536 nanos: 921442000 } tenant: "TestTenant" } ERROR: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:20898 } ] 2026-01-07T17:32:17.291991Z node 4 :YQL_PRIVATE_PROXY ERROR: task_ping.cpp:69: PrivatePingTask - QueryId: utqudo61a7crgjdjpbu3, Owner: 1ab9e862-6dbf9f30-b706bc3a-81c598db18, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:20898
: Error: ControlPlane PingTaskError 2026-01-07T17:32:17.296027Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7592673839644829090:2527], Table: `Root/yq/jobs` ([72057594046644480:58:1]), SessionActorId: [4:7592673749450512577:2527]Recv EvWriteResult from ShardID=72075186224037896, Status=STATUS_COMPLETED, TxId=281474976715775, Locks= , Cookie=0 2026-01-07T17:32:17.296065Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7592673839644829090:2527], Table: `Root/yq/jobs` ([72057594046644480:58:1]), SessionActorId: [4:7592673749450512577:2527]Got completed result TxId=281474976715775, TabletId=72075186224037896, Cookie=0, Mode=2, Locks= 2026-01-07T17:32:17.296175Z node 4 :GLOBAL ERROR: log.cpp:875: verification=State == ETransactionState::EXECUTING;fline=kqp_tx_manager.cpp:474; 2026-01-07T17:32:17.296706Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], statusCode=INTERNAL_ERROR. Issue=
: Error: (yexception) ydb/library/actors/core/log.cpp:881: verification=State == ETransactionState::EXECUTING;fline=kqp_tx_manager.cpp:474;, code: 2035 . sessionActorId=[4:7592673749450512577:2527]. 2026-01-07T17:32:17.297079Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], StateRollback: unknown message 278003713 2026-01-07T17:32:17.297206Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2359} SessionId: ydb://session/3?node_id=4&id=NmZlMjliYjctZjg3NjEyNzgtZjk5ZGZjMzgtMzhkMWM4MTc=, ActorId: [4:7592673749450512577:2527], ActorState: CleanupState, LegacyTraceId: 01kecr8dfge20g6hv43dskfgz4, got TEvKqpBuffer::TEvError in CleanupState, status: INTERNAL_ERROR send to: [4:7592673839644829094:2527] from: [4:7592673839644829073:2527] trace_id# 2026-01-07T17:32:17.297327Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1245} ActorId: [4:7592673839644829094:2527] TxId: 281474976715776. Ctx: { TraceId: 01kecr8dfge20g6hv43dskfgz4, Database: , SessionId: ydb://session/3?node_id=4&id=NmZlMjliYjctZjg3NjEyNzgtZjk5ZGZjMzgtMzhkMWM4MTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. {
: Error: (yexception) ydb/library/actors/core/log.cpp:881: verification=State == ETransactionState::EXECUTING;fline=kqp_tx_manager.cpp:474;, code: 2035 } trace_id# 2026-01-07T17:32:17.297413Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudo61a7crgjdjpbu3, Owner: 1ab9e862-6dbf9f30-b706bc3a-81c598db18 Ping response error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:20898: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:20898 } ]. Retry after: 0.090059s 2026-01-07T17:32:17.297615Z node 4 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3081} SessionId: ydb://session/3?node_id=4&id=NmZlMjliYjctZjg3NjEyNzgtZjk5ZGZjMzgtMzhkMWM4MTc=, ActorId: [4:7592673749450512577:2527], ActorState: CleanupState, LegacyTraceId: 01kecr8dfge20g6hv43dskfgz4, Failed to cleanup issues#
: Fatal: Internal error while executing transaction., code: 1
: Error: (yexception) ydb/library/actors/core/log.cpp:881: verification=State == ETransactionState::EXECUTING;fline=kqp_tx_manager.cpp:474;, code: 2035 trace_id# 2026-01-07T17:32:17.298898Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3550: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Start rollback 2026-01-07T17:32:17.299084Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037892, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037892 Generation: 1 Counter: 13 SchemeShard: 72057594046644480 PathId: 51, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.299181Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037896, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037896 Generation: 1 Counter: 11 SchemeShard: 72057594046644480 PathId: 58, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.299262Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7592673839644829073:2527], SessionActorId: [4:7592673749450512577:2527], Send EvWrite (external) to ShardID=72075186224037890, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715772 DataShard: 72075186224037890 Generation: 1 Counter: 13 SchemeShard: 72057594046644480 PathId: 54, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-07T17:32:17.388322Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudo61a7crgjdjpbu3, Owner: 1ab9e862-6dbf9f30-b706bc3a-81c598db18 Ping response error: {
: Error: Client is stopped }. Retry after: 0.134676s |83.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> ColumnBuildTest::Initiating_Failed [GOOD] >> ColumnBuildTest::Initiating_DisableFlag >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] Test command err: 2026-01-07T17:32:13.026649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673824455718968:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:13.026687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:13.375073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:13.383247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:13.383444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:13.428667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:13.547553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:13.560960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:13.560992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:13.560998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:13.561074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:13.566977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:13.843204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:13.851654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:14.057217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:14.111684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:14.122415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:32:14.125708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:14.249549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.406966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.442773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.485133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.517947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.558670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.599138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.629572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.661492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.692012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.302941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837340622469:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.303107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.303636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837340622481:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.303672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837340622482:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.303773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.307926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:16.325089Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673837340622485:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:32:16.388821Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673837340622536:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:16.781645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.829346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.882589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.940170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.995356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-0 ... 
CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.315131Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 64ms 2026-01-07T17:32:18.315398Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.315445Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:32:18.315546Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 64ms 2026-01-07T17:32:18.315566Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { 
Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.315592Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:32:18.315713Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 70ms 2026-01-07T17:32:18.316087Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.316283Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional 
{ Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.506120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673845930557821:2440]: Pool not found 2026-01-07T17:32:18.506730Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:32:18.688828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673845930557816:2437]: Pool not found 2026-01-07T17:32:18.690161Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:32:18.692998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673845930557926:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.693016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7592673845930557927:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:32:18.693091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.695216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673845930557930:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.695283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.937505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673845930557924:2455]: Pool not found 2026-01-07T17:32:18.937834Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:32:19.230692Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:43040) incoming connection opened 2026-01-07T17:32:19.230891Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:43040) -> (POST /Root, 24 bytes) 2026-01-07T17:32:19.231221Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d8a5:4e25:bc7b:0:c0a5:4e25:bc7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 68831dcd-333e9f40-7dbd8c42-f6dba9f6 2026-01-07T17:32:19.232325Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [68831dcd-333e9f40-7dbd8c42-f6dba9f6] reply with status: BAD_REQUEST message: Failed to decode POST body 2026-01-07T17:32:19.232564Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:43040) <- (400 AccessDeniedException, 73 bytes) 2026-01-07T17:32:19.232663Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:43040) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: deflate Content-Length: 32 {"QueueName": "Example"} 2026-01-07T17:32:19.232701Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:43040) Response: HTTP/1.1 400 AccessDeniedException Connection: keep-alive x-amzn-requestid: 68831dcd-333e9f40-7dbd8c42-f6dba9f6 Content-Type: application/x-amz-json-1.1 Content-Length: 73 Http output full {"__type":"AccessDeniedException","message":"Failed to decode POST body"} 2026-01-07T17:32:19.234124Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:43040) connection closed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthNone [GOOD] Test command err: 2026-01-07T17:32:13.108255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673822754974684:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:13.108579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:13.452856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:13.453078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:13.453384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:13.511784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:13.594731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:13.598172Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7592673822754974572:2081] 1767807133086533 != 1767807133086536 2026-01-07T17:32:13.693104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:13.693133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:13.693146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:13.693259Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:13.741024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:13.893970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:13.901772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:14.117296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:14.124845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:32:14.126830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:14.139529Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:14.144778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:32:14.217776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.354556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.404814Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.465349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-07T17:32:14.477047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.516335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.547379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.577241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.607079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.637513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.678527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.354742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673835639878118:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.354742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673835639878130:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.354812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.355060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673835639878133:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.355096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.358281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:16.374829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673835639878132:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:32:16.469577Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673835639878185:3103] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:16.899825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.936603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.969404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.001310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.400541Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:32:18.400626Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 45ms 2026-01-07T17:32:18.401233Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.402523Z node 1 :SQS TRACE: executor.cpp:286: Request [] 
Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.402551Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 46ms 2026-01-07T17:32:18.402934Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item 
{ Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.402967Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:32:18.403075Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 46ms 2026-01-07T17:32:18.403623Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:18.532892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673844229813448:2431]: Pool not found 2026-01-07T17:32:18.533261Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:32:18.749935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673844229813470:2438]: Pool not found 2026-01-07T17:32:18.750209Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:32:18.752687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673844229813576:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.752697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7592673844229813577:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:32:18.752747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.752972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673844229813580:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:18.753028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.005467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7592673844229813574:2455]: Pool not found 2026-01-07T17:32:19.005762Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:32:19.280763Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:57616) incoming connection opened 2026-01-07T17:32:19.280863Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:57616) -> (POST /Root) 2026-01-07T17:32:19.281255Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7836:47fa:a07b:0:6036:47fa:a07b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 4960bf6d-8d8b7b45-c21afe24-ebef0c68 2026-01-07T17:32:19.282391Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [4960bf6d-8d8b7b45-c21afe24-ebef0c68] reply with status: BAD_REQUEST message: Empty body 2026-01-07T17:32:19.282638Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:57616) <- (400 InvalidArgumentException, 60 bytes) 2026-01-07T17:32:19.282697Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:57616) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json 2026-01-07T17:32:19.282729Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:57616) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 4960bf6d-8d8b7b45-c21afe24-ebef0c68 Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-07T17:32:19.285006Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:57616) connection closed |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> KqpWrite::ProjectReplace+UseSink >> ColumnBuildTest::Filling_Failed [GOOD] >> ColumnBuildTest::Filling_DisableFlag >> KqpImmediateEffects::ReplaceExistingKey >> KqpImmediateEffects::DeleteAfterUpsert >> ColumnBuildTest::Applying_Failed [GOOD] >> ColumnBuildTest::Applying_DisableFlag >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpFail::OnPrepare |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |83.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpOverload::OltpOverloaded+Distributed [GOOD] Test command err: Trying to start YDB, gRPC: 18092, MsgBus: 61330 2026-01-07T17:32:01.419937Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673773935701332:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.420064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-07T17:32:01.456636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:01.912123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:01.916277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:01.916372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:01.953455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:02.077749Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673773935701217:2081] 1767807121356510 != 1767807121356513 2026-01-07T17:32:02.078441Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:02.203498Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:02.289358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.289386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.289392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.289486Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.453085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:02.942816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.951943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:03.008072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.240848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.586579Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.685751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.538632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673791115572287:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.538768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.539975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673791115572296:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.540042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.905076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.948954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.015956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.057216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.096330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.146182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.185636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.255687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.355287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795410540468:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.355388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.355644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795410540474:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.355692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795410540473:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.355705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.360177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:06.371369Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673795410540477:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:06.421026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673773935701332:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:06.421101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:06.447969Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673795410540528:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... DB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 5968, MsgBus: 15327 ... waiting for SysViewsRoster update finished 2026-01-07T17:32:12.850161Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:12.854793Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:32:12.858269Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:32:12.858526Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:32:12.858639Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:13.099681Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:13.099842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:13.128948Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807129843352 != 1767807129843356 2026-01-07T17:32:13.132822Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:13.185906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:13.356260Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:13.714148Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:13.714214Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:13.714255Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:13.714516Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:13.727448Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:14.136840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:14.174777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.410947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.817430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.118067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.641587Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.641871Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.642633Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.642706Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.672811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.920675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.186430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.485935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.758998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.044194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.333471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.625791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.985193Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:17.985331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:17.985694Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:17.985751Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:17.985807Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:17.991212Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:18.164020Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:18.209858Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=281474976715675; |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues >> ColumnBuildTest::Locking_DisableFlag [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |83.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WR2 >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Locking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:17.244160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:17.244254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:17.244307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:17.244363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:17.244395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:17.244422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:17.244477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:17.244532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:17.245354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:17.245660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:17.337711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:17.337771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:17.356573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:17.356959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:17.357145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:17.368309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:17.368714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:17.369526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:17.369802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:17.375645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.375887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:17.377224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:17.377306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.377461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:17.377521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:17.377570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:17.377782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:17.527528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.528915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.529449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:22.456921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-07T17:32:22.456971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:22.457006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-07T17:32:22.457059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1562:3429] message: TxId: 281474976730758 2026-01-07T17:32:22.457101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:22.457134Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-07T17:32:22.457161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-07T17:32:22.457217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:22.461925Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-07T17:32:22.462004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-07T17:32:22.462077Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-07T17:32:22.462209Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-07T17:32:22.464195Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 
2026-01-07T17:32:22.464350Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:22.464407Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:32:22.466050Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-07T17:32:22.466186Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:22.466242Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:22.466414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:22.466458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter 
[2:1753:3607] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:22.467121Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:22.467438Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:22.468385Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:22.468594Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 237us result status StatusSuccess 2026-01-07T17:32:22.469144Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 
0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> ColumnBuildTest::Rejecting [GOOD] >> ColumnBuildTest::Unlocking_DisableFlag >> ColumnBuildTest::Initiating_DisableFlag [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> KqpWrite::Insert >> ColumnBuildTest::Cancelling [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag |83.9%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 8597, MsgBus: 64128 2026-01-07T17:31:55.089875Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673746545054714:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:55.090549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:55.655351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:55.655434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:55.757152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:55.781603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:55.794169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:55.987255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:56.036307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:56.036328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:56.036342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:56.036457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:56.150235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:56.715657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:56.815830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.083258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.247791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:57.341151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.334715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673763724925733:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.334821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.341269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673763724925743:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.341339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:59.652515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.692579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.740802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.809881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.848266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.891855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.926736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:59.975891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:00.066717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673768019893914:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.066780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.066854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673768019893920:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.067000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673768019893922:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.067057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:00.071979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:00.086583Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673768019893923:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:00.094736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673746545054714:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:00.094826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:00.146747Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673768019893975:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:01.863670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 61741, MsgBus: 18387 2026-01-07T17:32:03.983186Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... 2026-01-07T17:32:11.246041Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:11.254756Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:11.254859Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:11.281868Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:11.333883Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:11.333919Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:11.333930Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:11.334019Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:11.406824Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:11.762869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:11.770556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:11.774927Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.845829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.016588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.105917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.120035Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:14.884946Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673828838797247:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.885060Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.885512Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673828838797257:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.885560Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.962807Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.997293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.035416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.074115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.148785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.185304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.220867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.294950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.363711Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673833133765431:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.363825Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.364088Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673833133765437:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.364137Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673833133765436:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.364143Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.367606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:15.378484Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673833133765440:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:15.433936Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673833133765491:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:16.113880Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673815953893528:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:16.113951Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:17.284199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.326406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.370920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> ColumnBuildTest::Filling_DisableFlag [GOOD] |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KeyValueGRPCService::SimpleWriteReadOverrunV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV1 >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 20202, MsgBus: 1468 2026-01-07T17:32:01.117012Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673773621667772:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.117062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:01.597125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:01.648649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:01.648727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:01.792326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-07T17:32:01.803386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:01.882651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:01.960779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:01.960800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:01.960805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:01.960870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.140659Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:02.548249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.555246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:02.619711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:02.789663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:02.959377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.040553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.116914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790801538798:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.117050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.117453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790801538808:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.117506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.446679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.490342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.528048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.575486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.641909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.689924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.734803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.788649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.868167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790801539675:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.868266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.868686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790801539681:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.868725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.868747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790801539680:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.873285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:05.885730Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673790801539684:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:05.974615Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673790801539735:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:06.121663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673773621667772:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:06.121751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:07.580806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... configuration 2026-01-07T17:32:16.423068Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:16.840495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:16.858142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:16.876849Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.950805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.143730Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:17.193567Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:17.214863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.620305Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673851318443331:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.620376Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.620750Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673851318443340:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.620800Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.685736Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.720811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.758867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.795101Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.834659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.864344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.950979Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.011973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.096672Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673855613411504:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.096766Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.096899Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673855613411509:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.097140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673855613411511:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.097177Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.101108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:20.112282Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673855613411512:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:20.165982Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673855613411564:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:21.188315Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673838433539693:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.188411Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:21.755977Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.490616Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7592673864203346745:2533], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr8jfb8wze27f653v7xrea. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=OWNlZjVjYWEtOTAxYmRhMGEtODBjN2RmZGEtMzA2OGViMGE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-07T17:32:22.490701Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592673864203346745:2533], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr8jfb8wze27f653v7xrea. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=OWNlZjVjYWEtOTAxYmRhMGEtODBjN2RmZGEtMzA2OGViMGE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-07T17:32:22.491262Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=OWNlZjVjYWEtOTAxYmRhMGEtODBjN2RmZGEtMzA2OGViMGE=, ActorId: [3:7592673859908379157:2533], ActorState: ExecuteState, LegacyTraceId: 01kecr8jfb8wze27f653v7xrea, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::Applying_DisableFlag [GOOD] |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> ColumnBuildTest::Cancellation_Applying_DisableFlag >> KqpPg::Returning+useSink [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> KqpWrite::UpsertNullKey >> ColumnBuildTest::DoNotRestoreDeletedRows [GOOD] >> KqpPg::Returning-useSink >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> ColumnBuildTest::DisabledAndEnabledFlag |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |83.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |83.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] Test command err: 2026-01-07T17:32:10.748211Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673811685484917:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:10.748512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:10.984568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:10.988770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:10.988862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:11.017231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:11.158297Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673811685484809:2081] 1767807130690500 != 1767807130690503 2026-01-07T17:32:11.159916Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:11.210713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:11.210732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:11.210736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:11.210807Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:11.234531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:11.416893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:11.424463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:11.640371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:11.650514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:32:11.652683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:11.677136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:32:11.741159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.748143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:11.912895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.977749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.031303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:32:12.036379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.082369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.125280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.177109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.215008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.253036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.291991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.060419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673828865355645:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.060552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.061095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673828865355657:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.061147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673828865355658:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.061275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.066203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:14.080457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673828865355661:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:32:14.158918Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673828865355712:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:14.543853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.583741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.616267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.652847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... : executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:32:22.488337Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 3ms 2026-01-07T17:32:22.488557Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:22.489503Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } 
} } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:22.489526Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 7ms 2026-01-07T17:32:22.489900Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:22.489931Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:32:22.490021Z node 2 :SQS 
DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 8ms 2026-01-07T17:32:22.490554Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:22.651872Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673861802064191:2441]: Pool not found 2026-01-07T17:32:22.652033Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:32:22.711258Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673840327225367:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:22.711336Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:22.862729Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673861802064150:2436]: Pool not found 2026-01-07T17:32:22.863471Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:32:22.873405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673861802064258:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.873495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592673861802064259:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:32:22.873552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.890804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673861802064262:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.890967Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.166205Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673861802064256:2454]: Pool not found 2026-01-07T17:32:23.166935Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:32:23.481328Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:57988) incoming connection opened Http output full {"__type":"InvalidArgumentException","message":"Can not parse request body from JSON"} 2026-01-07T17:32:23.481424Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:57988) -> (POST /Root, 44 bytes) 2026-01-07T17:32:23.481570Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b88e:59f:db7b:0:a08e:59f:db7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 86f174cb-c86722ec-61eac57a-6a4e41cd 2026-01-07T17:32:23.482212Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [86f174cb-c86722ec-61eac57a-6a4e41cd] reply with status: BAD_REQUEST message: Can not parse request body from JSON 2026-01-07T17:32:23.482372Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:57988) <- (400 InvalidArgumentException, 86 bytes) 2026-01-07T17:32:23.482415Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:57988) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip Content-Length: 44 nhV ,M-MKMURPrH-IU2j 2026-01-07T17:32:23.482441Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:57988) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 86f174cb-c86722ec-61eac57a-6a4e41cd Content-Type: application/x-amz-json-1.1 Content-Length: 86 2026-01-07T17:32:23.485036Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:57988) connection closed >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:18.718212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:18.718309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.718427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:18.718473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-07T17:32:18.718516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:18.718548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:18.718614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.718682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:18.719292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:18.719523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:18.796329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:18.796376Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:18.809204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:18.809468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:18.809646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:18.814920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:18.815218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:18.815937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:18.816155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:18.830435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.830644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:18.831757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:18.831805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.831916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:18.831963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-07T17:32:18.832000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:18.832145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:18.966594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.967897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:32:18.968547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.968895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:24.996773Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-07T17:32:24.996835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:24.996879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-07T17:32:24.996976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1825:3689] message: TxId: 281474976730757 2026-01-07T17:32:24.997040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:24.997078Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-07T17:32:24.997107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-07T17:32:24.997170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:25.003420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-07T17:32:25.003502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-07T17:32:25.003569Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-07T17:32:25.003700Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-07T17:32:25.007604Z node 2 :BUILD_INDEX NOTICE: 
schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-07T17:32:25.007773Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:25.007824Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:32:25.009590Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-07T17:32:25.009711Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:25.009751Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:25.009879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:25.009910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1883:3736] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:25.010637Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:25.010975Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-07T17:32:25.011733Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:25.011936Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 234us result status StatusSuccess 2026-01-07T17:32:25.012428Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Filling_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:18.358257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:18.358372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.358439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:18.358484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:18.358524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:18.358556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:18.358619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.358710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:18.359815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:18.360191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:18.454137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:18.454195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:18.472497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:18.472807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:18.473045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2026-01-07T17:32:18.480951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:18.481282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:18.482045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:18.482272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:18.485230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.485408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:18.486579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:18.486638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.486759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:18.486825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:18.486870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:18.487033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:18.639299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.640917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.641511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
24.782937Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730759:0 ProgressState 2026-01-07T17:32:24.783018Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-07T17:32:24.783048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-07T17:32:24.783081Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-07T17:32:24.783110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-07T17:32:24.783163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730759, ready parts: 1/1, is published: true 2026-01-07T17:32:24.783225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1821:3686] message: TxId: 281474976730759 2026-01-07T17:32:24.783301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-07T17:32:24.783341Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730759:0 2026-01-07T17:32:24.783367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730759:0 2026-01-07T17:32:24.783425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:24.786069Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730759 2026-01-07T17:32:24.786131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730759 2026-01-07T17:32:24.786197Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730759 2026-01-07T17:32:24.786322Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 
CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730759 2026-01-07T17:32:24.787921Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-07T17:32:24.788063Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:24.788119Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:32:24.789656Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-07T17:32:24.789775Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:24.789819Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:24.789942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for 
txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:24.789998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1966:3819] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:24.790711Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:24.791020Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:24.791940Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:24.792173Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 266us result status StatusSuccess 2026-01-07T17:32:24.792639Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> KqpImmediateEffects::ImmediateUpdate >> TControlPlaneProxyTest::ShouldSendListQueries >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |83.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:18.277584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:18.277703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.277743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:18.277784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:18.277819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:18.277846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:18.277914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.277986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:18.278795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:18.279069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:18.366289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:18.366345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:18.380473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:18.380800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:18.381008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:18.387179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:18.387489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:18.388251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:32:18.388484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:18.390988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.391223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:18.392331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:18.392385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.392501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:18.392548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:18.392585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:18.392748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:18.537973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.538905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.539969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1474976730758 ready parts: 1/1 2026-01-07T17:32:24.016959Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-07T17:32:24.017008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:24.017043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-07T17:32:24.017104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1761:3627] message: TxId: 281474976730758 2026-01-07T17:32:24.017149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:24.017182Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-07T17:32:24.017214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-07T17:32:24.017277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:24.019612Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-07T17:32:24.019718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-07T17:32:24.019793Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-07T17:32:24.019913Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-07T17:32:24.021716Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-07T17:32:24.021894Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:24.021952Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-07T17:32:24.023508Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-07T17:32:24.023659Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, 
LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:24.023701Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:24.023822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:24.023882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1821:3675] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:24.024652Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:24.024994Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:24.027818Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:24.028071Z node 2 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 274us result status StatusSuccess 2026-01-07T17:32:24.028591Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] Test command err: 2026-01-07T17:32:12.611735Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673819166813715:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:12.614105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:12.932830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:12.933359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:12.933505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:12.959907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:13.111646Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:13.113245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673819166813687:2081] 1767807132609148 != 1767807132609151 2026-01-07T17:32:13.184249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:13.184296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:13.184308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:13.184412Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:13.189381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:13.398229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:13.406065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:13.615452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:13.621652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:13.644747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:32:13.659515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:13.788089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.927921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.979786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-07T17:32:13.985748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.056655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.103407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.134307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.171888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.206648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.245082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.286375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.822534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673832051717247:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.822535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673832051717236:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.822650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.822956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673832051717251:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.823027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:15.826377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:15.836751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673832051717250:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:32:15.932381Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673832051717303:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:16.271863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.314405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.350137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.385211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 
quest [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 30ms 2026-01-07T17:32:24.230569Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:24.230602Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:32:24.230697Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 31ms 2026-01-07T17:32:24.231192Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 
} } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:24.233982Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:24.234015Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 31ms 2026-01-07T17:32:24.234249Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:24.234280Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:32:24.234348Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 33ms 2026-01-07T17:32:24.234654Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { 
Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:32:24.377975Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673871434357142:2432]: Pool not found 2026-01-07T17:32:24.378599Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:32:24.636019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673871434357135:2428]: Pool not found 2026-01-07T17:32:24.636428Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:32:24.639539Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592673871434357278:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:32:24.639560Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673871434357277:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.639672Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.639910Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673871434357281:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.639948Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.679731Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673849959518313:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.679837Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:24.934802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592673871434357275:2454]: Pool not found 2026-01-07T17:32:24.935438Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:32:25.185465Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:58260) incoming connection opened 2026-01-07T17:32:25.185542Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:58260) -> (POST /Root) 2026-01-07T17:32:25.185695Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [589b:864d:bb7b:0:409b:864d:bb7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 7987757e-be44e59b-1733beec-37b7c02b 2026-01-07T17:32:25.186176Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [7987757e-be44e59b-1733beec-37b7c02b] reply with status: BAD_REQUEST message: Empty body 2026-01-07T17:32:25.186356Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:58260) <- (400 InvalidArgumentException, 60 bytes) 2026-01-07T17:32:25.186414Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:58260) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip 2026-01-07T17:32:25.186443Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:58260) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 7987757e-be44e59b-1733beec-37b7c02b Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-07T17:32:25.193089Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:58260) connection closed >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::CreatePartitionStream |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink ------- [TM] {asan, default-linux-x86_64, 
release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-07T17:32:27.540778Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.548246Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.548289Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.548852Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.549439Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.561435Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.561879Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.565762Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.565788Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.565812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.566182Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.566674Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.567003Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.568551Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.568965Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:32:27.571997Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.572047Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.572072Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.572541Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.573263Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.573407Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.573928Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.575475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.576030Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.576154Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.576216Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2026-01-07T17:32:27.577362Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.577393Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.577417Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.583849Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.593163Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.593380Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.593727Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.594232Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.594584Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.594695Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.594743Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:32:27.595572Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.595635Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.595668Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.596033Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.600759Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.600999Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.603236Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.603774Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.605439Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.605597Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.605668Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:32:27.610140Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.610182Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.610214Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.618024Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:32:27.618790Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.618930Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.619214Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.619785Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.620107Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.620224Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.620276Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:32:27.621340Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.621373Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.621397Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.621708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.627095Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.627288Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.627562Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.629183Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.629636Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.629740Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.629788Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:32:27.631298Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.631328Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.631354Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.631724Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.632679Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.632838Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.633061Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-01-07T17:32:27.634192Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:32:27.634962Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:32:27.635377Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-07T17:32:27.635546Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:32:27.635730Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.635770Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:32:27.635801Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-07T17:32:27.635818Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-07T17:32:27.635861Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-01-07T17:32:27.635897Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-07T17:32:27.636092Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2026-01-07T17:32:27.636276Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] >> Initializer::Simple [GOOD] >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:17.516680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:17.516775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:17.516836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:17.516885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:17.516920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:17.516965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:17.517022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:17.517094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:17.518111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:17.518396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:17.609770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:17.609825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:17.621690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:17.622058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:17.622251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:17.629415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:17.629780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:17.630532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:17.630802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:17.636419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.636637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:17.637866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:17.637931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.638071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:17.638127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:17.638166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:17.638329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:17.783917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.784755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.784853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:32:17.784942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.785690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-07T17:32:27.182749Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725761:0 progress is 1/1 2026-01-07T17:32:27.182786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-07T17:32:27.182831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2026-01-07T17:32:27.182914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1891:3755] message: TxId: 281474976725761 2026-01-07T17:32:27.182971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-07T17:32:27.183008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-07T17:32:27.183043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-07T17:32:27.183118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:27.187150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-07T17:32:27.187266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-07T17:32:27.187382Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976725761 2026-01-07T17:32:27.187536Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-07T17:32:27.189524Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-07T17:32:27.189655Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:27.189740Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:32:27.191433Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-07T17:32:27.191560Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:27.191598Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:27.191767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:27.191813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1948:3801] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:27.192470Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:27.192845Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-07T17:32:27.193710Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:27.194084Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 393us result status StatusSuccess 2026-01-07T17:32:27.194649Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::OutOfSpace >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3551, MsgBus: 22502 2026-01-07T17:31:30.753171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:30.765013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:30.817389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:30.817492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:30.937062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673638952858326:2081] 1767807090115926 != 1767807090115929 2026-01-07T17:31:30.975752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:30.992198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:31.213114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:31.227219Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:31.261018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:31.261037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:31.261046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:31.261131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:32.045162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:32.058201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:32.105660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.309547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.509567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.584977Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.655318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673656132729392:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.655435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.656339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673656132729402:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:34.656410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.167155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.215931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.301553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.359593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.429857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.514302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.573257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.665945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.783554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673660427697587:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.783663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.784014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673660427697592:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.784059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673660427697593:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.784090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.789271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:35.807874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673660427697596:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:31:35.874950Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673660427697649:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 51ms during compilation 2026-01-07T17:31:38.101336Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NjIyYjA0ZDEtNmJmY2ZjOC05NDY4NzRmLTc5NzA4YzVh, ActorId: [1:7592673673312599857:2534], ActorState: ExecuteState, LegacyTraceId: 01kecr7761398mx7cq78vcd5bf, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 50ms exceeded" severity: 1 }{ message: "Cancelling after 51ms during compilation" severity: 1 } trace_id# Trying to start YDB, gRPC: 11438, MsgBus: 30672 2026-01-07T17:31:40.009027Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution ta ... ion 2026-01-07T17:32:17.583489Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:18.034630Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:18.080551Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:18.340747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:18.726465Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.019505Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.653911Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1906:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.654341Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.655047Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1980:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.655203Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:19.684310Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:19.877088Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.191463Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.445945Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.709698Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.981972Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.260373Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.570536Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.997719Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2792:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.997869Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.998419Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2796:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.998539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.998631Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2799:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.006156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:22.176379Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:2801:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:22.241748Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:2861:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:24.428535Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.752961Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.117254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.100499Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [5:3564:4767], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr8nhy8fdpz7pqvhh15rzp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZWUwYzAyYS03MWFlZDBmZS00ZTMxNjM3MC1iZmQxN2U5NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2026-01-07T17:32:27.100640Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [5:3564:4767], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr8nhy8fdpz7pqvhh15rzp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZWUwYzAyYS03MWFlZDBmZS00ZTMxNjM3MC1iZmQxN2U5NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2026-01-07T17:32:27.104088Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:3565:4768], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr8nhy8fdpz7pqvhh15rzp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZWUwYzAyYS03MWFlZDBmZS00ZTMxNjM3MC1iZmQxN2U5NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:3558:4431], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-07T17:32:27.104850Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZWUwYzAyYS03MWFlZDBmZS00ZTMxNjM3MC1iZmQxN2U5NQ==, ActorId: [5:3119:4431], ActorState: ExecuteState, LegacyTraceId: 01kecr8nhy8fdpz7pqvhh15rzp, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Table \'/Root/SecondaryKeys\' retry limit exceeded." severity: 1 } trace_id# |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:31:17.584068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:17.704101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:31:17.722119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:31:17.722542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:17.722590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:31:18.016169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:18.016323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:18.117526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807074390443 != 1767807074390447 2026-01-07T17:31:18.129836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:18.177333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:18.287540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:31:18.831792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:18.833995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:18.834054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:18.834085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:18.834327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:18.848301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:29.048855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.053483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:908:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.053620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.054714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:914:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.054842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.062956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:29.192124Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:912:2772], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:31:29.281766Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:969:2810] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:29.761256Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:979:2819], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:31:29.764457Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDhhYmJlYjctM2E5ZWIzZWItMzUxODlhY2ItNWFmZWM1OTA=, ActorId: [1:894:2761], ActorState: ExecuteState, LegacyTraceId: 01kecr6ycn2qsgdbstyvvmcmbb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2026-01-07T17:31:29.852249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.082925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:31:31.667131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.643844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2026-01-07T17:31:55.548838Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1517:3220] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2026-01-07T17:31:55.549049Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1517:3220] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:12: Error: Executing DROP TABLE
<main>: Error: Access denied., code: 2018
<main>: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
<main>: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2026-01-07T17:32:27.552747Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1674:3335] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2026-01-07T17:32:27.552914Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1674:3335] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:12: Error: Executing DROP TABLE
<main>: Error: Access denied., code: 2018
<main>: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1
owners number: 0 2026-01-07T17:32:17.796289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:17.796540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:17.799972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.800172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:17.801448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:17.801525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:17.801673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:17.801730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:17.801773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:17.801996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:17.962395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 
281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.963911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:17.964664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 91Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-07T17:32:28.626640Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-01-07T17:32:28.626666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-07T17:32:28.626694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2026-01-07T17:32:28.626755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1560:3427] message: TxId: 281474976730761 2026-01-07T17:32:28.626798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-07T17:32:28.626831Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2026-01-07T17:32:28.626857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730761:0 2026-01-07T17:32:28.626919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:32:28.630214Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2026-01-07T17:32:28.630299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730761 
2026-01-07T17:32:28.630357Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730761 2026-01-07T17:32:28.630478Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1753:3607], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2026-01-07T17:32:28.633914Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-07T17:32:28.634048Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1753:3607], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:28.634098Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:32:28.638855Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-07T17:32:28.638999Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, 
LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1753:3607], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:28.639059Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-07T17:32:28.639194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:32:28.639237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1769:3623] TestWaitNotification: OK eventTxId 105 2026-01-07T17:32:28.639906Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-07T17:32:28.640242Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-07T17:32:28.640993Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:28.641216Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 261us result status StatusSuccess 2026-01-07T17:32:28.641684Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 
MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61347, MsgBus: 16372 2026-01-07T17:31:27.667975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673625329873621:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:27.668517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:28.226092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:28.243537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:28.243652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:28.314210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673625329873583:2081] 1767807087663187 != 1767807087663190 2026-01-07T17:31:28.328640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:28.332515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:28.412971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:28.455358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:28.455380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:28.455386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:28.455490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:28.676316Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:28.962745Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:29.053558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:29.250232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:29.455745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:29.544862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.536473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642509744651:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.536597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.537106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642509744661:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.537182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.869702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.946779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.009361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.064274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.121093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.166236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.224681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.297473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.406663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646804712833:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.406740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.407143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646804712839:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.407177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673646804712838:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.407208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.411559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:32.425554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673646804712842:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:32.510767Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673646804712895:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:32.671736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673625329873621:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:32.671814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:34.659454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ons } 2026-01-07T17:32:20.460015Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:20.540656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.579132Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.622659Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.675021Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.711128Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.769260Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.827760Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.899022Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.021274Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673856239189657:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.021398Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.021790Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673856239189662:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.021827Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673856239189663:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.021907Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:21.027852Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:21.045262Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673856239189666:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:21.120707Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673856239189717:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:21.446456Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673834764350434:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.446543Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:23.793805Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.866727Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.930978Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":18,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":17,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_1_2","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_2"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":15,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Delete","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_1","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_1_1"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Upsert","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","ReadRangesPointPrefixLen":"1","E-Rows":"0","IndexSelectionInfo":"index:Index: 
(0,0,1,0,1,0,0)","ReadRangesKeys":["Fk"],"Table":"SecondaryKeys\/Index\/indexImplTable","ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Subplan Name":"CTE Stage_5","Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_1","Node Type":"Precompute_1_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_2","Node Type":"Precompute_1_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"}]},{"name":"\/Root\/SecondaryKeys\/Index\/indexImplTable","reads":[{"columns":["Fk","Key"],"scan_by":["Fk [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":7,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Operators":[{"Name":"Delete","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":14,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Name":"Upsert","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":22,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-07T17:32:29.628324Z :DEBUG: [db] [sessionid] [cluster] In 
Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.628365Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.628410Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.628774Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:29.629816Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:32:29.629878Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.630686Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.630703Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.630721Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.630954Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:29.631210Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:32:29.631239Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.631784Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.631801Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.631814Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.632105Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:32:29.632171Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.632206Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.632313Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-07T17:32:29.633105Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.633129Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.633140Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.639753Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:32:29.639797Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.639833Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.639891Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-07T17:32:29.641049Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:32:29.641074Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:32:29.641104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.641383Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:29.641944Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:29.656106Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:32:29.657042Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:29.657371Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-07T17:32:29.663687Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-07T17:32:29.663988Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:29.664030Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:32:29.664064Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:32:29.664085Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-07T17:32:29.664111Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-07T17:32:29.664131Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-07T17:32:29.664186Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-07T17:32:29.664204Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-07T17:32:29.664233Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-07T17:32:29.664250Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-07T17:32:29.664266Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-07T17:32:29.664295Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-07T17:32:29.664311Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-07T17:32:29.664328Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-07T17:32:29.664343Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-07T17:32:29.664360Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-07T17:32:29.664549Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-07T17:32:29.664568Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-07T17:32:29.664585Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-07T17:32:29.664614Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-07T17:32:29.664632Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-07T17:32:29.664653Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-07T17:32:29.664674Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-07T17:32:29.664692Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-07T17:32:29.664707Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-07T17:32:29.664733Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-07T17:32:29.664752Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-07T17:32:29.664785Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-07T17:32:29.664802Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-07T17:32:29.664818Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-07T17:32:29.664836Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-07T17:32:29.664860Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-07T17:32:29.664946Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-07T17:32:29.664970Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-07T17:32:29.664996Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-07T17:32:29.665016Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-07T17:32:29.665033Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-07T17:32:29.665049Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-07T17:32:29.665068Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-07T17:32:29.665085Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-07T17:32:29.665100Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-07T17:32:29.665121Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-07T17:32:29.665211Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-07T17:32:29.665285Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-07T17:32:29.665301Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-07T17:32:29.665318Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-07T17:32:29.665333Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-07T17:32:29.665353Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-07T17:32:29.665370Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-07T17:32:29.665387Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-07T17:32:29.665454Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-07T17:32:29.667820Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-07T17:32:29.668008Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-07T17:32:29.668039Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-07T17:32:29.668060Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-07T17:32:29.668077Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-07T17:32:29.668097Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-07T17:32:29.668115Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-07T17:32:29.668132Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-07T17:32:29.668151Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-07T17:32:29.668181Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-07T17:32:29.668198Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-07T17:32:29.668213Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-07T17:32:29.668246Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-07T17:32:29.668266Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-07T17:32:29.668284Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-07T17:32:29.668303Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-07T17:32:29.668329Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-07T17:32:29.668370Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-07T17:32:29.668387Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-07T17:32:29.668430Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-07T17:32:29.668449Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-07T17:32:29.668466Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-07T17:32:29.668483Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-07T17:32:29.668500Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-07T17:32:29.668517Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-07T17:32:29.668545Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-07T17:32:29.668562Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-07T17:32:29.668578Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-07T17:32:29.668606Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-07T17:32:29.668625Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-07T17:32:29.668641Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-07T17:32:29.668660Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-07T17:32:29.668686Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-07T17:32:29.668764Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-07T17:32:29.668784Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-07T17:32:29.668799Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-07T17:32:29.668814Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-07T17:32:29.668829Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-07T17:32:29.668844Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-07T17:32:29.668859Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-07T17:32:29.668874Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-07T17:32:29.668901Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-07T17:32:29.668922Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-07T17:32:29.668955Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-07T17:32:29.668972Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-07T17:32:29.668989Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-07T17:32:29.669004Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-07T17:32:29.669026Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-07T17:32:29.669053Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-07T17:32:29.669072Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-07T17:32:29.669094Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-07T17:32:29.669157Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-07T17:32:29.674756Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:32:29.676407Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.676552Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.676576Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.676952Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:32:29.677435Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:29.677621Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.678011Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:29.781830Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.782054Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:32:29.782118Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:29.782157Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:32:29.782229Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:32:29.985041Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-07T17:32:30.087268Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:32:30.087408Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:32:30.087576Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:32:30.088804Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:30.088825Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:30.088846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:30.096318Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:30.096910Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:30.097140Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:30.097653Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:30.201820Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:30.202311Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:32:30.202371Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:30.202407Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:32:30.202483Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-07T17:32:30.202573Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:32:30.202740Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:32:30.202803Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-07T17:32:30.202900Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |84.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64547, MsgBus: 6208 2026-01-07T17:32:01.421173Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673773769602308:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:01.421218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:01.861546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:01.916186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:01.916265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:01.967059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:01.992744Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673773769602115:2081] 1767807121342656 != 1767807121342659 2026-01-07T17:32:02.030642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:02.106800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:02.132773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:02.132805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:02.132812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:02.132899Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:02.449517Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:02.792589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:02.813240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:02.879300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.123701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.324698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:03.423772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.430783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790949473163:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.430976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.431240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673790949473175:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.431290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.805042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.838745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.873845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.914777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.988128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.037124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.093523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.159494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:06.263235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795244441339:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.263327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.263630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795244441344:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.263663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673795244441345:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.263998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:06.267877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:06.281125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673795244441348:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:06.363457Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673795244441399:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:06.424744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673773769602308:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:06.425017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:08.141222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... tect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:19.740131Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:19.834566Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:19.834644Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:19.839581Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:19.839948Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:19.871259Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673851371447129:2081] 1767807139710999 != 1767807139711002 2026-01-07T17:32:19.937524Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:19.937546Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:19.937554Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:19.937630Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:20.030113Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:20.353097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:20.368668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.420257Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.609638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.679175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.749158Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:23.117177Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673868551318188:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.117272Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.125063Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673868551318198:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.125159Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.207692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.257062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.344972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.372949Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.409477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.470066Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.558873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.612311Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.692151Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673868551319072:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.692221Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.692425Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673868551319078:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.692470Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.692488Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673868551319077:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.695784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:23.706825Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673868551319081:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:23.785685Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673868551319132:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:24.712222Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673851371447151:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.712311Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:25.683552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.759478Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:18.298357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:18.298471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.298514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:18.298568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:18.298608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:18.298639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:18.298700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:18.298775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:18.299698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:18.300256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:18.402454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:18.402509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:18.421273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:18.421575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:18.421839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:18.427779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:18.428074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:18.428812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:18.429036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:18.431418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.431584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:18.432751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:18.432807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:18.432945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:18.433000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:18.433041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:18.433223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:18.592779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.593700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.593820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.593889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.593954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:18.594743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730758:0, at schemeshard: 72075186233409549 2026-01-07T17:32:29.518463Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730758:0 ProgressState 2026-01-07T17:32:29.518566Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-07T17:32:29.518603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:29.518646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-07T17:32:29.518681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:29.518717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-07T17:32:29.518790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2457:4216] message: TxId: 281474976730758 2026-01-07T17:32:29.518844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-07T17:32:29.518879Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-07T17:32:29.518909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-07T17:32:29.518981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-07T17:32:29.521715Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-07T17:32:29.521808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-07T17:32:29.521897Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730758 2026-01-07T17:32:29.522037Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-07T17:32:29.524206Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-07T17:32:29.524351Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:29.524409Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-07T17:32:29.526301Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-07T17:32:29.526449Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:29.526504Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-07T17:32:29.526640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:32:29.526715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2528:4276] TestWaitNotification: OK eventTxId 106 2026-01-07T17:32:29.527418Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-07T17:32:29.527753Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:29.528559Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:29.528830Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 289us result status StatusSuccess 2026-01-07T17:32:29.529546Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |84.0%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:19.069666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:19.069797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:19.069855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:19.069904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing 
config: using default configuration 2026-01-07T17:32:19.069942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:19.069975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:19.070048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:19.070142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:19.071105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:19.071394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:19.154767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:19.154821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:19.169355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:19.169692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:19.169879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:19.177408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:19.177884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:19.178823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:19.179134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:19.183536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:19.183769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:19.184962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:19.185029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:19.185156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:19.185214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:19.185261Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:19.185479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:19.336294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338769Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.338948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.339010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.339089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.339164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.339242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:19.339343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... G: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730757:0, at schemeshard: 72075186233409549 2026-01-07T17:32:30.290872Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730757:0 ProgressState 2026-01-07T17:32:30.290973Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-07T17:32:30.291014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:30.291053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-07T17:32:30.291085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:30.291130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-07T17:32:30.291208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2599:4358] message: TxId: 281474976730757 2026-01-07T17:32:30.291257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-07T17:32:30.291291Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-07T17:32:30.291323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-07T17:32:30.291393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-07T17:32:30.298831Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-07T17:32:30.298951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-07T17:32:30.299033Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730757 2026-01-07T17:32:30.299194Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 
281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-07T17:32:30.304968Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-07T17:32:30.305187Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:30.305275Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-07T17:32:30.314097Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-07T17:32:30.314322Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:32:30.314389Z node 2 :BUILD_INDEX TRACE: 
schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-07T17:32:30.314606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:32:30.314674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2670:4418] TestWaitNotification: OK eventTxId 106 2026-01-07T17:32:30.315610Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-07T17:32:30.315946Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-07T17:32:30.321113Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:32:30.321517Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 4.57ms result status StatusSuccess 2026-01-07T17:32:30.322154Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-07T17:32:28.905339Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.905375Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.905421Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:28.906758Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:28.907430Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:28.918507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.919482Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-07T17:32:28.920518Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:32:28.920728Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-07T17:32:28.921064Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:32:28.921261Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-07T17:32:28.921388Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:28.921421Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:32:28.921465Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:32:28.921601Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.921633Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:32:28.921672Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:32:28.921692Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:32:28.921799Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.921877Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-07T17:32:28.921897Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-07T17:32:28.921915Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:32:28.921991Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.922015Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-07T17:32:28.922044Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-07T17:32:28.922069Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:32:28.922154Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2026-01-07T17:32:28.923392Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.923417Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.923436Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:28.937195Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:28.937632Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:28.937866Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:28.939583Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-07T17:32:28.940533Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:32:28.940759Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-07T17:32:28.941320Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:32:28.941554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-07T17:32:28.941744Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:28.941773Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:32:28.941803Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:32:28.941942Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.941973Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:32:28.941993Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:32:28.942013Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:32:28.942144Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.942218Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-07T17:32:28.942236Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-07T17:32:28.942252Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:32:28.942310Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-07T17:32:28.942337Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-07T17:32:28.942356Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-07T17:32:28.942373Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00 ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:32:31.263142Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2026-01-07T17:32:31.350607Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:32:31.350711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:32:31.350828Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:31.357195Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:31.357959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:31.358375Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:32:31.361549Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-07T17:32:31.470784Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-07T17:32:31.472009Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:31.474366Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:32:31.486313Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:32:31.487599Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-07T17:32:31.500912Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-07T17:32:31.501843Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-07T17:32:31.502732Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-07T17:32:31.503657Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-07T17:32:31.515143Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-01-07T17:32:31.516079Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-07T17:32:31.516157Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-07T17:32:31.516351Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:32:31.520247Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). 
Partition stream id: 1 2026-01-07T17:32:31.530552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.530586Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.530613Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:31.531133Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:31.531684Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:31.531847Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.532119Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:31.532529Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-07T17:32:31.533746Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.533774Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.533812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:31.534107Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:31.543361Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:31.543575Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.544339Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:31.544529Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:31.544676Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:31.544725Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:32:31.544965Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink |84.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> DataShardVolatile::DistributedWrite |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |84.1%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> TMLPWriterTests::Deduplicated_Reboot [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::Optional [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |84.1%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |84.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg >> ResultFormatter::Pg [GOOD] >> KqpWrite::UpsertNullKey [GOOD] >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpWrite::ProjectReplace-UseSink |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpFail::OnPrepare [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KeyValueGRPCService::SimpleWriteReadRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV2 |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |84.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> Yq_1::Basic_EmptyDict [GOOD] |84.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |84.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |84.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |84.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |84.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::EffectWithSelect+UseSink |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/mlp/ut/unittest >> TMLPWriterTests::Deduplicated_Reboot [GOOD] Test command err: 2026-01-07T17:24:46.040163Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671905854325049:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:46.040215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:46.065611Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:24:46.245642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:46.245791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:46.275527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:46.343109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:46.343586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:46.348838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592671905854325019:2081] 1767806686038889 != 1767806686038892 2026-01-07T17:24:46.461679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0039c1/r3tmp/yandex2LaJCw.tmp 2026-01-07T17:24:46.461708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0039c1/r3tmp/yandex2LaJCw.tmp 2026-01-07T17:24:46.462586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0039c1/r3tmp/yandex2LaJCw.tmp 2026-01-07T17:24:46.462792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:24:46.541206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:24:46.592837Z INFO: TTestServer started on Port 11295 GrpcPort 2845 PQClient connected to localhost:2845 2026-01-07T17:24:47.021846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:24:47.049452Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:47.207169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:51.040808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671905854325049:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:51.041524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:24:54.704275Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kecqtp9440hcm7prsg7qf00w", Request deadline has expired for 2.201324s seconds 2026-01-07T17:24:54.795930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671940214064448:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:54.796240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671940214064456:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:54.796454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:54.798248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592671940214064463:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:54.798574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:24:54.807881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:24:54.831774Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592671940214064462:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:24:54.889465Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592671940214064528:2657] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:24:55.497646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.522485Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592671940214064536:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:24:55.523241Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmUxMGNhNDEtMzFhY2ZkZWEtMTI1ZDcwNjMtNmE5ZjkyZGU=, ActorId: [1:7592671940214064444:2338], ActorState: ExecuteState, LegacyTraceId: 01kecqtx9faepspx9y36ggg062, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:24:55.525924Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:24:55.527357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:24:55.609346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592671948803999419:2839] 2026-01-07T17:25:01.193628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:25:01.193648Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded === CheckClustersList. Ok 2026-01-07T17:25:02.003896Z :TODO INFO: TTopicSdkTestSetup started 2026-01-07T17:25:02.472675Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:25:02.565050Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592671974573803394:2938] connected; active server actors: 1 2026-01-07T17:25:02.566104Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. 
Deleted partitions []. Added partitions [0] 2026-01-07T17:25:02.606366Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:25:02.606782Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:25:02.685582Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:25:02.690518Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:25:02.690987Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:25:02.691019Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 7207518 ... 991] connected; active server actors: 1 2026-01-07T17:32:31.568817Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7592673899515446091:2472] 2026-01-07T17:32:31.570601Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 1 [19:7592673899515446091:2472] 2026-01-07T17:32:31.570731Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-07T17:32:31.571737Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-07T17:32:31.571912Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-07T17:32:31.575482Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-07T17:32:31.575519Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T17:32:31.575547Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T17:32:31.575583Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-07T17:32:31.575624Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 0 vs 0 2026-01-07T17:32:31.575653Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-07T17:32:31.575682Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T17:32:31.575714Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-07T17:32:31.575746Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 
2026-01-07T17:32:31.577917Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037894][Partition][0][StateIdle] Updateing MLP consumer 'mlp-consumer' config 2026-01-07T17:32:31.578391Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-07T17:32:31.578688Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:32:31.612415Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: [[19:7592673899515446108:3034]] Start describe 2026-01-07T17:32:31.614912Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7592673899515446108:3034]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-07T17:32:31.614936Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7592673899515446108:3034]] Start write 2026-01-07T17:32:31.621352Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:386: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPQ::TEvEndOffsetChanged. Offset: 1 2026-01-07T17:32:31.621405Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7592673899515446091:2472] 2026-01-07T17:32:31.623560Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2026-01-07T17:32:31.623675Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:763: [72075186224037894][0][MLP][mlp-consumer] Fetched 1 messages 2026-01-07T17:32:31.623707Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-07T17:32:31.623731Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T17:32:31.623759Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-07T17:32:31.624593Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7592673899515446108:3034]] Handle TEvPersQueue::TEvResponse >>>>>> reload PQ tablet 2026-01-07T17:32:31.626016Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:644: [72075186224037894][0][MLP][mlp-consumer] Delete old WAL: c0000000000wmlp-consumer|0000000000000000 - c0000000000wmlp-consumer|0000000000000000 2026-01-07T17:32:31.626106Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:654: [72075186224037894][0][MLP][mlp-consumer] Write Snapshot Count: 1 Size: 69 cookie: 3 2026-01-07T17:32:31.679201Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:32:31.679257Z node 19 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-07T17:32:31.680283Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7592673899515446156:2476] 2026-01-07T17:32:31.692574Z node 19 :PERSQUEUE INFO: partition_init.cpp:1016: [topic1:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-07T17:32:31.693473Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 2 [19:7592673899515446156:2476] 2026-01-07T17:32:31.693648Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-07T17:32:31.694056Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-07T17:32:31.694232Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-07T17:32:31.697214Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-07T17:32:31.697258Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T17:32:31.697286Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T17:32:31.697321Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-07T17:32:31.697377Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7592673899515446156:2476] 2026-01-07T17:32:31.699016Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:707: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-07T17:32:31.699066Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2026-01-07T17:32:31.699231Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:763: [72075186224037894][0][MLP][mlp-consumer] Fetched 1 messages 2026-01-07T17:32:31.699273Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-07T17:32:31.699300Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T17:32:31.699352Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-07T17:32:31.699960Z node 19 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' size 152 2026-01-07T17:32:31.700781Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:644: [72075186224037894][0][MLP][mlp-consumer] Delete old WAL: c0000000000wmlp-consumer|0000000000000000 - c0000000000wmlp-consumer|0000000000000000 2026-01-07T17:32:31.700862Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:654: [72075186224037894][0][MLP][mlp-consumer] Write Snapshot Count: 1 Size: 69 cookie: 3 2026-01-07T17:32:31.702046Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:302: [72075186224037894][0][MLP][mlp-consumer] HandleOnWrite TEvKeyValue::TEvResponse Status: 1 Cookie: 3 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2026-01-07T17:32:31.702084Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:327: [72075186224037894][0][MLP][mlp-consumer] TX write finished 2026-01-07T17:32:31.702109Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-07T17:32:31.702144Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T17:32:31.702174Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-07T17:32:31.702200Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-07T17:32:31.702231Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-07T17:32:32.575337Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-07T17:32:32.609932Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:32:32.609971Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:32.633073Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: [[19:7592673903810413489:3070]] Start describe 2026-01-07T17:32:32.633420Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7592673903810413489:3070]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-07T17:32:32.633440Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7592673903810413489:3070]] Start write 2026-01-07T17:32:32.635610Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7592673903810413489:3070]] Handle TEvPersQueue::TEvResponse 2026-01-07T17:32:32.694549Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-07T17:32:32.694602Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T17:32:32.694635Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-07T17:32:32.694671Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/mlp/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> 
KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |84.2%| [TM] {RESULT} ydb/core/persqueue/public/mlp/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2026-01-07T17:31:10.955175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673552803677924:2264];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:10.955368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:10.983310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:11.739675Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.740004Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] E0107 17:31:11.821457009 66540 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:31:11.821581997 66540 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:31:11.838975Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.839187Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.954666Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.954791Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.954831Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:11.958130Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:11.960994Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.019498Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.050244Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.050310Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.149570Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.202079Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.202250Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.202549Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.205357Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30202 2026-01-07T17:31:12.205478Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.205530Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.261784Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.293421Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.295951Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.296012Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.296044Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.339819Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.339904Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.348570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:12.348685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:12.396511Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.507625Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30202: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30202 } ] 2026-01-07T17:31:12.568590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeSc ... 4037899, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-07T17:32:34.058012Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:34.058021Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. BEFORE: 1.0 2026-01-07T17:32:34.058106Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. Send EvRead to shardId: 72075186224037899, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-07T17:32:34.058138Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. AFTER: 0.1 2026-01-07T17:32:34.058148Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-07T17:32:34.060657Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:34.060689Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. Taken 0 locks 2026-01-07T17:32:34.060705Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:34.060735Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518408:2947], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kecr8xq6bfkyv4hjr3dj63wa. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzhkYmI4OTUtNjY1ZTIzYjEtYzdhYmRmMjYtNDA2OTFhMjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:32:34.060757Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518408:2947], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kecr8xq6bfkyv4hjr3dj63wa. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzhkYmI4OTUtNjY1ZTIzYjEtYzdhYmRmMjYtNDA2OTFhMjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:34.060780Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:34.060801Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:34.060819Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:34.060832Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. returned 0 rows; processed 0 rows 2026-01-07T17:32:34.060868Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. dropping batch for read #0 2026-01-07T17:32:34.060882Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:34.060896Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:32:34.060916Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715740, task: 1, CA Id [7:7592673911245518408:2947]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:34.061008Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7592673911245518408:2947], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kecr8xq6bfkyv4hjr3dj63wa. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzhkYmI4OTUtNjY1ZTIzYjEtYzdhYmRmMjYtNDA2OTFhMjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:34.061238Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715740;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:34.061279Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-07T17:32:34.061293Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. Taken 0 locks 2026-01-07T17:32:34.061306Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:32:34.061332Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518393:2943], TxId: 281474976715739, task: 1. Ctx: { TraceId : 01kecr8xb34e9kz5rd3kj6fa9x. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YzEzNmE5ODgtNmUxOTYzMjMtZDdmZjQyNDQtZTdlNjQyYjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:32:34.061350Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518393:2943], TxId: 281474976715739, task: 1. Ctx: { TraceId : 01kecr8xb34e9kz5rd3kj6fa9x. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YzEzNmE5ODgtNmUxOTYzMjMtZDdmZjQyNDQtZTdlNjQyYjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:34.061367Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:32:34.061383Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:32:34.061412Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-07T17:32:34.061431Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. returned 1 rows; processed 1 rows 2026-01-07T17:32:34.061463Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. dropping batch for read #0 2026-01-07T17:32:34.061467Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518409:2948], TxId: 281474976715740, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr8xq6bfkyv4hjr3dj63wa. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzhkYmI4OTUtNjY1ZTIzYjEtYzdhYmRmMjYtNDA2OTFhMjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:32:34.061475Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. effective maxinflight 1024 sorted 0 2026-01-07T17:32:34.061487Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. Scheduled table scans, in flight: 0 shards. 
pending shards to read: 0, 2026-01-07T17:32:34.061503Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715739, task: 1, CA Id [7:7592673911245518393:2943]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:32:34.061523Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7592673911245518409:2948], TxId: 281474976715740, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr8xq6bfkyv4hjr3dj63wa. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzhkYmI4OTUtNjY1ZTIzYjEtYzdhYmRmMjYtNDA2OTFhMjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:34.061635Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715740;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:34.061637Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7592673911245518393:2943], TxId: 281474976715739, task: 1. Ctx: { TraceId : 01kecr8xb34e9kz5rd3kj6fa9x. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YzEzNmE5ODgtNmUxOTYzMjMtZDdmZjQyNDQtZTdlNjQyYjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:34.061784Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715739;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:32:34.061948Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7592673911245518394:2944], TxId: 281474976715739, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr8xb34e9kz5rd3kj6fa9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=YzEzNmE5ODgtNmUxOTYzMjMtZDdmZjQyNDQtZTdlNjQyYjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-07T17:32:34.062092Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7592673911245518394:2944], TxId: 281474976715739, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr8xb34e9kz5rd3kj6fa9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=YzEzNmE5ODgtNmUxOTYzMjMtZDdmZjQyNDQtZTdlNjQyYjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-07T17:32:34.062193Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715739;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> KqpWrite::CastValuesOptional [GOOD] >> LocalTableWriter::SupportedTypes >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1682, MsgBus: 28255 2026-01-07T17:32:22.222278Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673864036731889:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:22.222328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:22.419051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:22.468128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:22.468229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:22.545071Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673864036731865:2081] 1767807142220881 != 1767807142220884 2026-01-07T17:32:22.548333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:22.560203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:22.649616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:22.649647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:22.649654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:22.649738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:22.702630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:23.137953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:23.145617Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:23.228686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.230243Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:23.395381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.553029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.622606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.609026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876921635630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.609126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.609600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876921635640:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.609651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.934347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.969338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.997931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.029214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.055778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.084844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.126052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.176327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.266179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881216603810:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.266282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.266546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881216603815:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.266609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.266628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881216603816:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.269370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:26.279266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673881216603819:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:26.354347Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673881216603870:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:27.223106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673864036731889:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:27.223169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:28.034902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:29.876338Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:29.936409Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.036573Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:30.036651Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:30.037247Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:30.038899Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673894136005700:2081] 1767807149860035 != 1767807149860038 2026-01-07T17:32:30.068696Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:30.224524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:30.261547Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:30.261572Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:30.261580Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:30.261663Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:30.858323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:30.903102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:30.913752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:30.923707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:31.066802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:31.309033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:31.384736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.518306Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673911315876747:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.518407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.518860Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673911315876757:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.518913Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.636881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.754424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.822866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.886874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.942876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:34.024598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:34.139896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:34.252396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:34.512979Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673915610844929:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:34.513076Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:34.517126Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673915610844934:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:34.517220Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673915610844935:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:34.517281Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:34.522939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:34.556897Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673915610844938:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:34.658393Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673915610844989:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:34.861944Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673894136005724:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:34.862307Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:37.026054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::Interactive |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |84.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> LocalTableWriter::DecimalKeys >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> LocalTableWriter::WriteTable >> KqpResultSetFormats::ValueFormat_Simple |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite >> LocalTableWriter::DataAlongWithHeartbeat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 1345, MsgBus: 7012 2026-01-07T17:32:22.815490Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673860757298678:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:22.815585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:23.141653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:23.141770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:23.201647Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:23.304000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:23.304473Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673860757298650:2081] 1767807142810312 != 1767807142810315 2026-01-07T17:32:23.338101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:23.531037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:23.531071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:23.531079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:23.531157Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:23.591765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:23.846570Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:23.982174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:24.034911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.200737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.380640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.453359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.371676Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673877937169704:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.381094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.389331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673877937169714:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.389423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.731521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.763983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.794352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.834144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.868517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.938697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.973229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.021028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.117166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673882232137881:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.117250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.121320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673882232137884:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.121456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.121772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673882232137888:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.127118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:27.140570Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673882232137890:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:27.227234Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673882232137941:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:27.821049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673860757298678:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:27.821120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22551, MsgBus: 3972 2026-01-07T17:32:30.706158Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673895985259619:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:30.706207Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:30.758264Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.901453Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:30.981148Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.982406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:30.982489Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:30.990990Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:31.233308Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:31.233331Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:31.233339Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:31.233423Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:31.446368Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:31.690333Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:31.910674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:31.927929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:32.002700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:32.188032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:32.289176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.706523Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673895985259619:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:35.706589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:35.757311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673917460097919:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.757408Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.757672Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673917460097929:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.757735Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.833682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.917532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.960881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.006286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.049885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.087887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.137055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.200019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.319234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673921755066096:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.319332Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.319784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673921755066101:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.319822Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673921755066102:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.320088Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.323949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:36.343914Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673921755066105:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:36.443882Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673921755066156:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> TBlobStorageProxyTest::TestPartialGetBlock >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |84.3%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpResultSetFormats::DefaultFormat >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> KqpQuery::ExecuteWriteQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 9104, MsgBus: 26741 ... waiting for SysViewsRoster update finished 2026-01-07T17:31:25.893095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:26.008752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:31:26.025842Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:31:26.026349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:26.026411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:31:26.318627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:26.318803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:26.390476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807082849754 != 1767807082849758 2026-01-07T17:31:26.400080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:26.445894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:26.589427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:31:27.038582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:27.038636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:27.038665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:27.039071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:27.053669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:27.451321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:27.584208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:27.899273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.293814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:28.665737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:29.754789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.755059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.756016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.756158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:29.808148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.040617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.381599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:30.699208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.024738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.374647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.741567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.188918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:32.710628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.710811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.711216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.711455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.711725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:32.727964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:32.934931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2803:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:33.000294Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2863:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:35.807793Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3164:4457] TxId: 281474976715673. Ctx: { TraceId: 01kecr74zrbe5wh97bxhfcpw0n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjkzYmYzNTQtNmVjN2M2YWMtODE1NDM2MjItODgwNjcwY2I=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECI ... lterResource ok# false data# peer# 2026-01-07T17:32:39.249264Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7dfe80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-07T17:32:39.249565Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7c2980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-07T17:32:39.249873Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7e0580] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-07T17:32:39.250545Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a96d980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-07T17:32:39.250862Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a860f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-07T17:32:39.251132Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a960780] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-07T17:32:39.251383Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a92cd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-07T17:32:39.251681Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a974980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-07T17:32:39.251965Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7ec280] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-07T17:32:39.252246Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aabee80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-07T17:32:39.252516Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a84e980] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-07T17:32:39.252792Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7e9180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-07T17:32:39.253371Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8b1e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-07T17:32:39.253681Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8ee480] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-01-07T17:32:39.253958Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8c2180] received request Name# 
Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-07T17:32:39.254237Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8c3d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-07T17:32:39.254531Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a635580] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-07T17:32:39.254807Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a88a880] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-07T17:32:39.255078Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a636380] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-07T17:32:39.255361Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a88a180] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-07T17:32:39.255653Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a887e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-07T17:32:39.255908Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a882380] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-07T17:32:39.256180Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a796d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-07T17:32:39.256440Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a607d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-07T17:32:39.256700Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8d0f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-07T17:32:39.259524Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aaa9e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-07T17:32:39.259891Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a79dd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-07T17:32:39.260194Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a92e280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-07T17:32:39.260481Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a9eff80] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-07T17:32:39.260765Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a93a680] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-07T17:32:39.261066Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a939180] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-01-07T17:32:39.261346Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a9f4c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-07T17:32:39.261650Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a9f9280] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-07T17:32:39.261933Z node 7 :GRPC_SERVER 
DEBUG: logger.cpp:36: [0x7d950a9a6080] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-07T17:32:39.262202Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aad5380] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-07T17:32:39.262485Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a9f6180] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-07T17:32:39.262760Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6ada80] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-07T17:32:39.263031Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aabf580] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-07T17:32:39.263286Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a74d580] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-07T17:32:39.263575Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aa5e380] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-07T17:32:39.263854Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a9f2280] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-07T17:32:39.264113Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a70c280] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-07T17:32:39.264392Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a4080] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-07T17:32:39.264683Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a4e80] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-07T17:32:39.267156Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aa10c80] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-07T17:32:39.267566Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aa0f780] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-07T17:32:39.267886Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a86c580] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-07T17:32:39.268215Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a5c80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-07T17:32:39.268529Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a6a80] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-07T17:32:39.268813Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a8680] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-07T17:32:39.269131Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a7880] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-01-07T17:32:39.269476Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a6a9480] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-07T17:32:39.269759Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7bff80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-07T17:32:39.270025Z node 7 
:GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7b9680] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-07T17:32:39.275445Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a8a2280] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-07T17:32:39.275833Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a86d380] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-07T17:32:39.276116Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a74c780] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-07T17:32:39.276765Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a976c80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-07T17:32:39.277070Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a5f8180] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-07T17:32:39.277370Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a7fbe80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-07T17:32:39.277664Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950aa6d880] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-07T17:32:39.277960Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d950a975780] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> KqpWrite::OutOfSpace [GOOD] >> KqpWrite::InsertRevert [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> LocalTableWriter::SupportedTypes [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 16709, MsgBus: 19972 2026-01-07T17:32:24.813224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673872702627453:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.819471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:25.184384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:25.244247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:25.244351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:25.270957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:25.356857Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:25.363386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673872702627419:2081] 1767807144809382 != 1767807144809385 2026-01-07T17:32:25.429098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:25.461658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:25.461682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:25.461690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:25.461804Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:25.822500Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:26.010247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:26.019045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:26.074029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.226004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.422014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.508438Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.238708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673889882498488:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.238816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.244440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673889882498498:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.244537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.618022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.674682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.730645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.773919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.841591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.925537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.008159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.097384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.241477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673894177466667:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.241571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.241704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673894177466672:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.245078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673894177466674:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.245142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.245345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:29.259314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673894177466675:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:29.347413Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673894177466727:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:29.810985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673872702627453:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:29.811041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:31.097166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 46131:7762515]; 2026-01-07T17:32:33.048689Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:33.067177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:33.223062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:33.269283Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673907621573533:2081] 1767807153017088 != 1767807153017091 2026-01-07T17:32:33.298010Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:33.299781Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:33.299842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:33.313584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:33.526828Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:33.609587Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:33.609608Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:33.609615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:33.609692Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:34.061087Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:34.390938Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:34.427135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:34.584471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.093750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.257698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.630624Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673929096411876:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:37.630747Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:37.630983Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673929096411885:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:37.631023Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:37.691950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.728044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.758898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.803054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.847915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.926801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.987652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.049488Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673911916540890:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:38.050154Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:38.053506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.199667Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673933391380057:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:38.199757Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:38.200235Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673933391380063:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:38.200274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:38.200297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673933391380062:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:38.204913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:38.233876Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673933391380066:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:38.315162Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673933391380117:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:40.108637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27372, MsgBus: 4091 2026-01-07T17:32:25.993052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673876880885710:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:25.993110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:26.248537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:26.355144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:26.355245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:26.359858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:26.387539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:26.484244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:26.538214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:26.538232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:26.538238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:26.538323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:26.992811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:27.004530Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:27.055210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.214548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.369346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.465067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.925062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673894060756573:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.925156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.928703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673894060756583:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:29.928786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.381538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.438582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.488036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.537479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.582112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.637637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.703835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.796286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.899076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673898355724755:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.899181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.899607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673898355724760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.899651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673898355724761:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.899934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:30.903983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:30.933493Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673898355724764:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:30.992466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673876880885710:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:30.992534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:31.033501Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673902650692114:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26498, MsgBus: 17050 2026-01-07T17:32:35.349009Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673920103715912:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:35.349080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:35.405240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:35.494293Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:35.507949Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673920103715884:2081] 1767807155347959 != 1767807155347962 2026-01-07T17:32:35.526323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:35.526410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:35.539943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:35.646424Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:35.682766Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:35.682782Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:35.682786Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:35.682837Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:36.193551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:32:36.202874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:36.218699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.290891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.402227Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:36.461548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.564075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.481105Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673937283586935:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.481207Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.481526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673937283586945:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.481569Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.569741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.643074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.682156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.735371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.787861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.831198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.881814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.949154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.050732Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673941578555106:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.050825Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.051313Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673941578555112:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.051358Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673941578555111:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.051391Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.055855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:40.072391Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673941578555115:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:40.132143Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673941578555166:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:40.350247Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673920103715912:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:40.351103Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::OutOfSpace [GOOD] Test command err: Trying to start YDB, gRPC: 22961, MsgBus: 7015 2026-01-07T17:32:21.667770Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673859397572182:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.668905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:21.715946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:22.004502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:22.027932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:22.028026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:22.045351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:22.126547Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:22.217163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:22.234400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:22.234436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:22.234451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:22.234544Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:22.673188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:22.676775Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:22.740889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.911825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.076831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.161117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.163867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876577443183:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.164004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.166365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876577443193:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.166461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.577183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.618388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.658175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.732004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.771823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.815594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.897574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.970802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.052258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673880872411376:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.052364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.052839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673880872411381:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.052860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673880872411382:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.052912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.058989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:26.073622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673880872411385:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:26.150852Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673880872411438:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:26.668231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673859397572182:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.668288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23000, MsgBus: 25191 ... waiting for SysViewsRoster update finished 2026-01-07T17:32:32.629852Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown ... node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:33.406673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:33.974217Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:33.974280Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:33.974323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:33.974560Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:33.989558Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:34.750899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:34.868287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.241331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.705234Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.038392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.805455Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1915:3521], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.805801Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.806652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1989:3540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.806760Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.834984Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.043798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.335255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.588019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.839719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.255636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.557972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.931640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.377265Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.377429Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.377943Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2803:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.378032Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.378130Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2806:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.384071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:39.603074Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2808:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:39.701913Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2870:4231] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_DISK_GROUP_OUT_OF_SPACE;details=;tx_id=3; 2026-01-07T17:32:42.071308Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [2:3179:4464], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [2:3154:4464]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:3179:4464]. Ignored this error.{
: Error: , code: 2006 } 2026-01-07T17:32:42.071996Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3173:4464], SessionActorId: [2:3154:4464], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036
: Error: , code: 2006 . sessionActorId=[2:3154:4464]. 2026-01-07T17:32:42.072480Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=MTkwMjRkNDUtYmI3MTQxOWUtYmVlNmUwYzEtOWEwOTRiYzQ=, ActorId: [2:3154:4464], ActorState: ExecuteState, LegacyTraceId: 01kecr95hab4ykg7rsybkd0pgp, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [2:3174:4464] from: [2:3173:4464] trace_id# 2026-01-07T17:32:42.072673Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:3174:4464] TxId: 281474976715673. Ctx: { TraceId: 01kecr95hab4ykg7rsybkd0pgp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MTkwMjRkNDUtYmI3MTQxOWUtYmVlNmUwYzEtOWEwOTRiYzQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036 subissue: {
: Error: , code: 2006 } } trace_id# 2026-01-07T17:32:42.073071Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MTkwMjRkNDUtYmI3MTQxOWUtYmVlNmUwYzEtOWEwOTRiYzQ=, ActorId: [2:3154:4464], ActorState: ExecuteState, LegacyTraceId: 01kecr95hab4ykg7rsybkd0pgp, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`." issue_code: 2036 severity: 1 issues { issue_code: 2006 severity: 1 } } trace_id# |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 10287, MsgBus: 8914 2026-01-07T17:31:26.578222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673620582408129:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:26.578387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:27.015702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:27.034511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:27.035780Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673620582407997:2081] 1767807086546640 != 1767807086546643 2026-01-07T17:31:27.056641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:27.056752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:27.101499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:27.393125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:27.400026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:27.400064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:27.400071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:27.400463Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:27.549263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:28.436562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:28.449806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:31:31.362024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642057245342:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.362127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.365058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642057245377:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.365499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642057245380:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.365595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.369957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:31.385851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673642057245381:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:31:31.486871Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673642057245433:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:31.579496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673620582408129:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:31.579550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:31:31.852656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 4587, MsgBus: 12350 2026-01-07T17:31:33.207438Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673652051909763:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:33.209114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:33.311738Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:33.402050Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:33.415427Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673652051909743:2081] 1767807093205580 != 1767807093205583 2026-01-07T17:31:33.425058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:33.425121Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:33.480182Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:33.489032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:33.665694Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:33.665719Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:33.665727Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:33.665805Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-07T17:31:34.262926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:34.278679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:37.663721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673669231779818:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:37.663841Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:37.664607Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673669231779831:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:37.664644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673669231779832:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:37.664708Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:37.668499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:37.682941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:31:37.683624Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673669231779835:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:31:37.751308Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673669231779886:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, req ... OUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.232596Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673791147157520:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.232645Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.341060Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.395401Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.448760Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.494950Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.540670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.607677Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.673900Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.729855Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592673769672319322:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:05.729931Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:05.731525Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:05.837950Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673791147158396:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.838053Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.838572Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673791147158401:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.838621Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592673791147158402:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.838673Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:05.844097Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:05.863047Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592673791147158405:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:05.923035Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592673791147158457:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:08.260510Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.840919Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:32:15.840966Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:17.765786Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-07T17:32:17.807653Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-07T17:32:18.832498Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-07T17:32:19.299649Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-07T17:32:24.393853Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecr84gr36ntpq76f2cxv2ye", SessionId: ydb://session/3?node_id=5&id=MTExNDA0ZmMtNTA4NzQ1NGMtOGMzNzQzNDEtYWMxZmYyOGM=, Slow query, duration: 16.299810s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-07T17:32:24.727804Z node 5 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 72.7112 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037927 table: [/Root/.tmp/sessions/99537521-4eae-7440-e72d-6a9d051e6f01/Root/test_table_4dc1e863-4052-12b9-569e-7ca42fac9c52] 2026-01-07T17:32:24.996446Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-07T17:32:25.006386Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 
2026-01-07T17:32:25.419760Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecr84gr36ntpq76f2cxv2ye", SessionId: ydb://session/3?node_id=5&id=MTExNDA0ZmMtNTA4NzQ1NGMtOGMzNzQzNDEtYWMxZmYyOGM=, Slow query, duration: 17.325702s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-07T17:32:25.428994Z --------------- Start update --------------- 2026-01-07T17:32:25.428833Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:32:25.436132Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:32:39.542986Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecr8nf39h2phtq68jwwn1pj", SessionId: ydb://session/3?node_id=5&id=OTk4NGUwNDYtOTRlZTY4NmEtZTIwMDdjODgtYWZlMTIyN2E=, Slow query, duration: 14.095096s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n UPDATE test_table SET data = \"a\"\n ", parameters: 0b |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> LocalTableWriter::DecimalKeys [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 23169, MsgBus: 17280 2026-01-07T17:32:21.103181Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673856918815882:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.103257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:21.328629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:21.334352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:21.338341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:32:21.423985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:21.424445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:21.520405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:21.520426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:21.520449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:21.520547Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:21.579705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:22.023666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:22.042650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:22.090539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.113180Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:22.246104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.415066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.492471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.454394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673869803719592:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.454524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.461062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673869803719602:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.461141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.788077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.830411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.868660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.914928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.957235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.000443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.049747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.108055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.206248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874098687771:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.206366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.206687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874098687776:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.206725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874098687777:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.206835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.210466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:25.222711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673874098687780:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:25.314275Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673874098687831:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:26.104814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673856918815882:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.104869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:27.088471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... :32:28.659956Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:28.673037Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673888703468224:2081] 1767807148559867 != 1767807148559870 2026-01-07T17:32:28.697162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:28.697245Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:28.712903Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:28.853645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:28.949362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:28.949389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:28.949397Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:28.949483Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:29.422798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:29.451158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:32:29.546147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.655980Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:29.708545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.782297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:32.857122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673905883339277:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:32.857227Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:32.857873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673905883339287:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:32.857963Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:32.933692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:32.976471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.011988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.050629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.137366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.173257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.210345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.263412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:33.381449Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673910178307460:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.381564Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.381933Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673910178307465:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.381983Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673910178307466:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.382015Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:33.386740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:33.406790Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673910178307469:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:33.475780Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673910178307520:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:33.569041Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673888703468328:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:33.569115Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:35.656583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.705478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.745857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2026-01-07T17:32:39.901761Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673937168145112:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:39.903688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:39.938658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:40.304531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:40.356912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:40.357494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:40.373315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:32:40.481025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:40.527226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:40.810201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:40.810230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:40.810246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:40.810388Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:40.909172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:41.248767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:41.262540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.507693Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handshake: worker# [1:7592673945758080484:2490] 2026-01-07T17:32:41.508046Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:32:41.508361Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:32:41.508393Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Send handshake: worker# [1:7592673945758080484:2490] 2026-01-07T17:32:41.513599Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:41.514437Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2026-01-07T17:32:41.514778Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673945758080580:2551] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:32:41.514834Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:41.515110Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673945758080580:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2026-01-07T17:32:41.573081Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673945758080580:2551] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:41.573190Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:41.573273Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673945758080577:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } >> TBlobStorageProxyTest::TestProxyGetSingleTimeout |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 26368, MsgBus: 12884 2026-01-07T17:32:19.283002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673851130143307:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:19.283067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:19.510905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:19.542200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:19.542308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:19.596374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:19.613123Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:19.616988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673851130143267:2081] 1767807139280900 != 1767807139280903 2026-01-07T17:32:19.686973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:19.728270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:19.728294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:19.728325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:19.728429Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:20.201905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:32:20.263698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.304053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:20.413385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.600382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.668861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.718184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673864015047043:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.718410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.718791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673864015047053:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.718868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.036350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.078538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.112043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.158574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.234153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.318099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.394556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.442753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.525902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673868310015223:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.525985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.526237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673868310015228:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.526267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673868310015229:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.526302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.530046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:23.542106Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673868310015232:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:23.606519Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673868310015283:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:24.283453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673851130143307:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.283512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:25.307973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 918190783181:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:35.420056Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:35.556204Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:35.557368Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:35.558233Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:35.558298Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:35.558799Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673918190783158:2081] 1767807155419468 != 1767807155419471 2026-01-07T17:32:35.600034Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:35.693546Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:35.693588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:35.693595Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:35.693673Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:35.832432Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:36.178506Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:36.186425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:36.199455Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.257767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.429912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:36.433060Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:36.554333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.364777Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673935370654199:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.364885Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.365297Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673935370654209:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.365354Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:39.530737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.592491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.637919Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.716322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.773192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.837945Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.939938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.035196Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.133702Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673939665622389:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.133804Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.134649Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673939665622394:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.134729Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673939665622395:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.134917Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:40.141656Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:40.156994Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673939665622398:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:40.253487Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673939665622449:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:40.421031Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673918190783181:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:40.421129Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:42.670050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2026-01-07T17:32:41.307996Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673942830124662:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.308039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.781029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:41.829637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:41.829784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:41.837721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:41.897263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:41.900740Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673942830124638:2081] 1767807161305668 != 1767807161305671 2026-01-07T17:32:42.059137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:42.144502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.144530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.144537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:32:42.144661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:42.341806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.435866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:42.445896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:42.450946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.620807Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handshake: worker# [1:7592673947125092773:2490] 2026-01-07T17:32:42.621202Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:32:42.621506Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:32:42.621563Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Send handshake: worker# [1:7592673947125092773:2490] 2026-01-07T17:32:42.625301Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
38][1:7592673947125092863:2550] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:42.625569Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2026-01-07T17:32:42.625771Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673947125092866:2550] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:32:42.625813Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:42.625906Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673947125092866:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2026-01-07T17:32:42.637511Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673947125092866:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:42.637584Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:42.637671Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673947125092863:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> LocalTableWriter::ConsistentWrite [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 
2026-01-07T17:32:41.567965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673942157531383:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.568141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.604522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:41.896128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:41.896204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:41.952329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:41.959161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:41.994625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:41.998381Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673942157531223:2081] 1767807161528274 != 1767807161528277 2026-01-07T17:32:42.163383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:42.280996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.281020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.281030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:42.281155Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:42.572907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:42.577931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.580729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:42.585873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.738283Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handshake: worker# [1:7592673946452499445:2553] 2026-01-07T17:32:42.738631Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:32:42.738899Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:32:42.738928Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Send handshake: worker# [1:7592673946452499445:2553] 2026-01-07T17:32:42.739295Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:42.744830Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-07T17:32:42.745094Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-07T17:32:42.745287Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673946452499448:2552] Handle 
NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:32:42.745352Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:42.745439Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673946452499448:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T17:32:42.749821Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673946452499448:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:42.749896Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:42.749959Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673946452499444:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TBlobStorageProxyTest::TestSingleFailureMirror >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection |84.3%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots >> KeyValueGRPCService::SimpleWriteReadRangeV2 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TBlobStorageProxyTest::TestVPutVCollectVGetRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2026-01-07T17:32:41.600971Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673942678551841:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.629035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:42.197042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:42.236803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:42.236898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:42.374845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:42.445133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673942678551727:2081] 1767807161553607 != 1767807161553610 2026-01-07T17:32:42.446627Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:42.461089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:42.745330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.973532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.973555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.973566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:42.973634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:43.556453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:43.580128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:43.584436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.890609Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handshake: worker# [1:7592673951268487156:2495] 2026-01-07T17:32:43.891044Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:32:43.891365Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:32:43.891398Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Send handshake: worker# [1:7592673951268487156:2495] 2026-01-07T17:32:43.892079Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 
1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:43.901194Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-07T17:32:43.901432Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2026-01-07T17:32:43.901661Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:32:43.901703Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.901804Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T17:32:43.909729Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:43.909807Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.909856Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2026-01-07T17:32:43.910237Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:43.910929Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ 
Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:43.911531Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2026-01-07T17:32:43.911658Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2026-01-07T17:32:43.911813Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-07T17:32:43.929279Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:43.929384Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.929427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2026-01-07T17:32:43.933353Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:43.933668Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2026-01-07T17:32:43.933838Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-07T17:32:43.953206Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673951268487249:2554] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:43.953335Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.953384Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2026-01-07T17:32:43.954769Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673951268487245:2554] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestPutGetMany [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2026-01-07T17:32:41.514292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673944394878839:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.514364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.566213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:42.081204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:42.174029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:42.174140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:42.250959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:42.279030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:42.285314Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673944394878585:2081] 1767807161420631 != 1767807161420634 
2026-01-07T17:32:42.390156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:42.513262Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.708361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.708390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.708397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:42.708490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:43.097747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:43.108074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:43.120257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.386264Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handshake: worker# [1:7592673952984814017:2494] 2026-01-07T17:32:43.386682Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:32:43.387280Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:32:43.387362Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Send handshake: worker# [1:7592673952984814017:2494] 2026-01-07T17:32:43.388542Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 35b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 23b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:32:43.388766Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 35 },{ Order: 3 BodySize: 23 }] } 2026-01-07T17:32:43.389054Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673952984814113:2555] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:32:43.389101Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.389211Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673952984814113:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 35b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 23b }] } 2026-01-07T17:32:43.401908Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592673952984814113:2555] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:32:43.402000Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:32:43.402062Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592673952984814110:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> 
TBlobStorageProxyTest::TestCollectGarbagePersistence >> TBlobStorageProxyTest::TestDoubleFailure >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26419, MsgBus: 6752 2026-01-07T17:32:20.647048Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673855936649739:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:20.647162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:20.884161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:20.948119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:20.948301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:21.002563Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:21.003166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673855936649713:2081] 1767807140645619 != 1767807140645622 2026-01-07T17:32:21.007236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:21.070016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:21.070048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:21.070064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:21.070160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:21.167470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:21.492537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:21.549426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.661045Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:21.700701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.892183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:21.993262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.761546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673868821553477:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.761693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.762089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673868821553487:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.762141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.169856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.213797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.262944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.300008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.337543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.394316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.435653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.501005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.590513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673873116521648:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.590592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.590839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673873116521653:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.590883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673873116521654:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.591119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.593877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:24.605215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673873116521657:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:24.684675Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673873116521710:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:25.652241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673855936649739:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:25.652309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:26.431051Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:32:26.440785Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS ... 94037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:37.185513Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:37.185883Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:37.185901Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:37.186339Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:37.572737Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:37.679665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:37.685216Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:37.706122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.789393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.798157Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:38.025277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.116296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.286714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673944841891613:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:41.286828Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:41.287174Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673944841891623:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:41.287225Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:41.383134Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.434267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.496363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.561794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.616992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.732209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.784842Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673923367053472:2239];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.784904Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:41.813809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:41.922197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.099276Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673949136859802:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.099394Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.099746Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673949136859807:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.099799Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673949136859808:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.100103Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.104320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:42.145136Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673949136859811:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:42.240907Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673949136859863:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:45.566412Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592673962021762078:2541], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr98kvey03ceywqpxrr0jk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=OThlOWY2ZC1mYzVjYTI1Mi1mZGExZDgwZC03ZDQ1Mg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:32:45.566650Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592673962021762079:2542], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr98kvey03ceywqpxrr0jk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=OThlOWY2ZC1mYzVjYTI1Mi1mZGExZDgwZC03ZDQ1Mg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7592673962021762075:2528], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:32:45.566947Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=OThlOWY2ZC1mYzVjYTI1Mi1mZGExZDgwZC03ZDQ1Mg==, ActorId: [3:7592673962021762045:2528], ActorState: ExecuteState, LegacyTraceId: 01kecr98kvey03ceywqpxrr0jk, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestGetMultipart >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestNormal |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |84.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpImmediateEffects::Interactive [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27636, MsgBus: 63784 2026-01-07T17:32:21.885328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673860262086061:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.890458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:22.193123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:22.225246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:22.225357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:22.269860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:22.371887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:22.373015Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673860262085926:2081] 1767807141863717 != 1767807141863720 2026-01-07T17:32:22.392229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:22.456054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:22.456083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:22.456093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:22.456180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:22.890240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:22.907595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:22.916414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:22.986527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.137510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.398754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.481150Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.490556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673877441956979:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.490633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.491387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673877441956989:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.491443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.843978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.877641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.916591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.958056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.000328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.036820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.114775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.169712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.243796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881736925163:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.243886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.244181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881736925168:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.244207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.244221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673881736925169:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.247027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:26.261279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673881736925172:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:26.324086Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673881736925223:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:26.870355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673860262086061:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.870434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:28.102591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... ervice] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:39.394181Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:39.409376Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:39.409468Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:39.427949Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673936323639520:2081] 1767807159073008 != 1767807159073011 2026-01-07T17:32:39.467287Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:39.491694Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:39.671076Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:39.741620Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:39.741640Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:39.741648Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:39.741731Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:40.113063Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:40.383758Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:40.391417Z node 3 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:40.412640Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.540114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.742142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:40.831397Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.097665Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673936323639662:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:44.097728Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:44.314198Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673957798477871:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:44.314285Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:44.314539Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673957798477881:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:44.314571Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:44.411624Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.455564Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.512847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.566180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.617056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.697149Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.759177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.895894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.057421Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673962093446042:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.057527Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.058963Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673962093446048:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.058964Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673962093446047:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.059024Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.065599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:45.084619Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:32:45.084991Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673962093446051:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:45.187616Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673962093446106:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:48.175321Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 30107, MsgBus: 2374 2026-01-07T17:32:24.097613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673869999568958:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.097911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:24.404746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:24.404878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:24.410849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:24.430673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:24.500274Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673869999568834:2081] 1767807144093017 != 1767807144093020 2026-01-07T17:32:24.511867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:24.631700Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:24.631731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:24.631737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:24.631807Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:24.645380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:25.107217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:25.191697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:25.267624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.418404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.608123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.685723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.635532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673882884472598:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.635665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.642048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673882884472608:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.642154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.004371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.035475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.067722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.098495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.132666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.207322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.243264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.330888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.424387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673887179440776:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.424483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.424764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673887179440781:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.424799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673887179440782:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.424824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.429130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:28.440465Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673887179440785:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:28.531435Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673887179440836:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:29.097198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673869999568958:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:29.097270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:30.402935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.036785Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:41.182643Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:41.182748Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:41.185008Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:41.186444Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:41.193070Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673939810962557:2081] 1767807160984198 != 1767807160984201 2026-01-07T17:32:41.204481Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:41.477051Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:41.492520Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:41.492542Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:41.492551Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:41.492633Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:42.026110Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.047719Z node 3 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:42.065386Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:42.080196Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.163898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.323312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.421058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.450206Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673961285800907:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.450339Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.450828Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673961285800917:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.450888Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.661609Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.735863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.791100Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.849133Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.896353Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.966898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.016829Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673944105930065:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:46.017034Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:46.062018Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.168514Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.330819Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673965580769096:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.330963Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.331464Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673965580769101:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.331534Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673965580769102:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.331659Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.336581Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:46.370057Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673965580769105:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:46.465507Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673965580769158:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:48.755869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: Trying to start YDB, gRPC: 17816, MsgBus: 13940 2026-01-07T17:32:23.623025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673867849934435:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:23.623893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:23.663263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:23.891677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:23.891774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:23.949641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:23.983861Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:23.985492Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:23.988999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673867849934394:2081] 1767807143606850 != 1767807143606853 2026-01-07T17:32:24.084666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:24.084688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:24.084696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:24.084773Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:24.222713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:24.611568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:24.649177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:24.678226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.863967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.034668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.126523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:26.808829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673880734838151:2399], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.808907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.809415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673880734838161:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:26.809478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.165103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.202791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.241973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.277283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.312200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.393106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.437237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.511221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.597140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673885029806328:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.597237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.597794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673885029806333:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.597837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673885029806334:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.598122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.602013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:27.617456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673885029806337:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:27.692246Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673885029806390:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:28.617029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673867849934435:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:28.617091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:29.566798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... 937876137688:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:40.883448Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:40.949693Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:41.072582Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:41.077077Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673937876137626:2081] 1767807160829495 != 1767807160829498 2026-01-07T17:32:41.081845Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:41.081932Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:41.110801Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:41.199984Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:41.297454Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:41.297485Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:41.297493Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:41.297584Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:41.885444Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.186271Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:42.198503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:42.215569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.330512Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.584222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.702210Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.878310Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673937876137688:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:45.878381Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:45.879331Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673959350975985:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.879415Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.879804Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673959350975995:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:45.879868Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.043361Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.104076Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.185910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.238110Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.290132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.343300Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.426120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.509190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.624578Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673963645944167:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.624700Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.629133Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673963645944172:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.629319Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673963645944173:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.629579Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.634929Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:46.656733Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673963645944176:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:46.764321Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673963645944227:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:48.833407Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TBlobStorageProxyTest::TestProxyLongTailDiscover |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3167, MsgBus: 9459 2026-01-07T17:32:21.895076Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673857117280476:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.895742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:22.183497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:22.183600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:22.193500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:22.251715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:22.297460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:22.412613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:22.417437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:22.417457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:22.417462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:32:22.417519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:22.825874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:22.837555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:22.910279Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:22.924676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.055452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.201839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.272972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.071171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874297151487:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.071970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.072466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874297151497:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.072542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.364150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.393739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.423229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.451714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.479507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.517898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.553306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.623185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.708263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874297152369:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.708354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.708598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874297152374:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.708618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.708651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673874297152375:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.713035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:25.725004Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673874297152378:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:25.813931Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673874297152429:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:26.895660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673857117280476:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.895731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:27.187083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689 ... 944096235829:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.617123Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.732129Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:41.749063Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673944096235592:2081] 1767807161563417 != 1767807161563420 2026-01-07T17:32:41.749139Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:41.767559Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:41.767648Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:41.799508Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:41.955421Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:41.975667Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:41.975689Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:41.975696Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:41.975770Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:42.491496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:42.499742Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:42.515918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.601556Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.677764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.921851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.043614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.596100Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673944096235829:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:46.596209Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:46.940553Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673965571073943:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.940662Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.941026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673965571073953:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.941094Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.013667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.066269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.118406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.170718Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.224164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.309510Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.398391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.543943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.708839Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673969866042123:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.708994Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.709523Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673969866042129:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.709566Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673969866042128:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.709601Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.715027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:47.740389Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673969866042132:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:47.844628Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673969866042183:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:50.033511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2026-01-07T17:32:49.530366Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002b56/r3tmp/tmp6nyERn//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:32:49.552063Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# 
true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18227, MsgBus: 17781 2026-01-07T17:32:23.884628Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673868523128916:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:23.904244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:24.218278Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:24.218371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:24.266293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:24.288591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673868523128816:2081] 1767807143871198 != 1767807143871201 2026-01-07T17:32:24.304922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:24.310210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:24.432049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:24.489545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:24.489574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:24.489591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:24.489666Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:24.921316Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:24.984819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:24.992167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:25.077647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.268717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.457936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.538020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.409697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673885702999870:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.409832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.410222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673885702999880:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.410266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:27.823113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.883380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.942611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.993308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.044193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.077882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.124562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.176181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.266511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673889997968044:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.266608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.266985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673889997968050:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.267040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673889997968049:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.267080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.271669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:28.284629Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673889997968053:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:28.390385Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673889997968106:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:28.885160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673868523128916:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:28.885234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:30.525769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... et_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.720857Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.720863Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:42.720969Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:43.203044Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:43.437914Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:43.446086Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:43.457305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.568338Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.885502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.022529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.199064Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673946899926657:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:47.199139Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:47.309448Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673968374764880:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.309539Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.309947Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673968374764890:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.309992Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.434652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.485690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.549269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.653514Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.725592Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.789536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.860464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.981854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.165366Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673972669733059:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.165471Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.165810Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673972669733064:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.165858Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673972669733065:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.165893Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.170744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:48.253782Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673972669733068:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:48.312765Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673972669733125:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:50.940218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:52.105735Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592673989849602741:2561], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr9ese6ym7asmwaa64f2h7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NDQxZTY4NTgtMmZmNGI2NTItYTFmYzA1ODItNzhkODYwOTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:32:52.106297Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592673989849602742:2562], TxId: 281474976710678, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr9ese6ym7asmwaa64f2h7. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NDQxZTY4NTgtMmZmNGI2NTItYTFmYzA1ODItNzhkODYwOTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7592673989849602738:2528], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:32:52.106652Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NDQxZTY4NTgtMmZmNGI2NTItYTFmYzA1ODItNzhkODYwOTU=, ActorId: [3:7592673981259668005:2528], ActorState: ExecuteState, LegacyTraceId: 01kecr9ese6ym7asmwaa64f2h7, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2026-01-07T17:32:52.285591Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002b62/r3tmp/tmp9R4yZI//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:32:52.382178Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 
BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:32:52.390196Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002b62/r3tmp/tmp9R4yZI//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-07T17:32:52.442820Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 
LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13538, MsgBus: 11312 2026-01-07T17:32:19.412022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673848786318705:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:19.413373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:19.657973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:19.658114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:19.690216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:19.733785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:19.771145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:19.943152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:19.943192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:19.943199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:19.943267Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:20.002771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:20.365309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:20.376023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:20.420959Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:20.442056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.626687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.808645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:20.873634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.722154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673861671222417:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.722272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.722949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673861671222426:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:22.723008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.076487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.114800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.165907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.210266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.257992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.304501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.356253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.396710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:23.504144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673865966190592:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.504233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.504531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673865966190597:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.504562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673865966190598:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.504697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:23.509266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:23.528270Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673865966190601:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:23.596536Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673865966190652:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:24.411401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673848786318705:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:24.411469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:25.483136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 7278 2026-01-07T17:32:38.053039Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:38.053214Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:38.223879Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:38.225759Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673927115569536:2081] 1767807157986257 != 1767807157986260 2026-01-07T17:32:38.258631Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:38.258725Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:38.265057Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:38.274684Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:38.501623Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:38.501651Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:38.501660Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:38.501758Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:38.800292Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:32:39.022144Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:39.256872Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:39.266006Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:39.280540Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.447766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.687635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:39.777282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.795688Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673948590407890:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.795796Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.796270Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673948590407900:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.796329Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:42.898930Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:42.958411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.015642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.078367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.168875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.266628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.340550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.401135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.545116Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673952885376071:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:43.545299Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:43.550645Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673952885376076:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:43.550747Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673952885376077:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:43.551084Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:43.559349Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:43.587098Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673952885376080:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:43.674394Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673952885376131:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:46.350754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:53.211887Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:32:53.211933Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 28837, MsgBus: 25582 ... waiting for SysViewsRoster update finished 2026-01-07T17:32:25.269300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:25.387549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:32:25.411200Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:32:25.411767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:25.411834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:32:25.727816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:25.727966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:25.823969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807142362126 != 1767807142362130 2026-01-07T17:32:25.836313Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:25.885134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:26.014179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:26.414006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:26.414081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:26.414115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:26.414633Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:26.425948Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:26.753416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:26.830436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.090838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.478795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:27.835260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.733116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.733286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.734236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.734336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:28.773069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:28.991701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.319476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.647859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:29.931554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.262977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.603972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:30.952237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:31.387070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.387250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.387684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.388203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.388392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.394125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:31.575455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:31.640094Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:34.772705Z node 1 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-01-07T17:32:34.774488Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3170:4452] TxId: 281474976715674. Ctx: { TraceId: 01kecr8xkf6phatmf9mgf4cyw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTI2ZTQ2NTEtZTk5MzI5NTAtY2Y0NTVmNzMt ... 962530384357:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:45.337206Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:45.547664Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:45.585710Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:45.601078Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673962530384252:2081] 1767807165285055 != 1767807165285058 2026-01-07T17:32:45.675854Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:45.675953Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:45.688282Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:45.891565Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:45.929547Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:45.929571Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:45.929578Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:45.929670Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:46.261089Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:46.862122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:46.869348Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:46.883985Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.992091Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.143464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.256630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.313978Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673962530384357:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:50.314073Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:50.317138Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673984005222604:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.317263Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.319595Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673984005222670:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.319730Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.372698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.423860Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.465667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.521161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.592862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.675735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.761744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.913299Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:51.055339Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673988300190781:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:51.055441Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:51.056136Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673988300190786:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:51.056189Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673988300190787:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:51.056429Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:51.060569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:51.076711Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673988300190790:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:51.151722Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673988300190841:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:54.124132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestKinesisHttpProxy::MissingAction >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2026-01-07T17:32:50.273305Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/00273a/r3tmp/tmp76YlP0//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:32:50.274472Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. 
Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/00273a/r3tmp/tmp76YlP0//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-07T17:32:50.354188Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can 
not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:32:50.354411Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:32:54.824172Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/00273a/r3tmp/tmpJwViMA//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:32:54.824874Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/00273a/r3tmp/tmpJwViMA//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-07T17:32:54.853757Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:32:54.853908Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TestKinesisHttpProxy::DifferentContentTypes |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpResultSetFormats::ArrowFormat_Types_String >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TestYmqHttpProxy::TestSendMessage >> TestKinesisHttpProxy::TestPing >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TestYmqHttpProxy::TestGetQueueUrl |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> 
TestYmqHttpProxy::TestCreateQueue >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23711, MsgBus: 61784 2026-01-07T17:32:21.415589Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673859462761945:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:21.416852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:21.624564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:21.624668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:21.648145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:21.691183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:21.721924Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:21.889602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:21.889631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:21.889638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:21.889715Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:21.979329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:22.425007Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:22.427784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:22.437890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:22.507631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.631793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.790380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:22.868892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:24.796610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673872347665645:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.796731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.802785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673872347665655:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:24.802905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.195360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.246202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.291062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.324727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.358609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.397730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.441470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.490189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:25.575562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876642633828:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.575667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.575807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876642633834:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.575840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.575871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673876642633833:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:25.579046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:25.589041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673876642633837:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:32:25.663328Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673876642633888:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:26.415652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673859462761945:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.415709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:27.340661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... pp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:44.785008Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592673958430469569:2081] 1767807164315795 != 1767807164315798 2026-01-07T17:32:44.821721Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:44.823973Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:44.824044Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:44.835937Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:44.909018Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:45.149579Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:45.149604Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:45.149612Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:45.149690Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:45.389103Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:46.092052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:46.121343Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:46.147478Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.288351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.520560Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.598779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.238671Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673979905307923:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.238761Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.238996Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673979905307932:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.239032Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.369454Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592673958430469712:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:49.369555Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:49.386850Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.465625Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.525726Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.582085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.663211Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.731837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.823411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.960824Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:50.175211Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673984200276112:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.175316Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.176461Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673984200276117:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.176529Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592673984200276118:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.176869Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:50.181305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:50.198172Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592673984200276121:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:50.283887Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592673984200276174:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:53.825144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:53.971569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV2 >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::DropSourceTableBeforeTransferring |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> TExportToS3Tests::RebootDuringCompletion ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-07T17:32:27.738234Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 
2026-01-07T17:32:27.738271Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.738336Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.738774Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.753484Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.774855Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.777415Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-07T17:32:27.782026Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.782362Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.782397Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.782701Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.789145Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.789353Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.789694Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.790170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.790284Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.797036Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.797129Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:32:27.801618Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.801649Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.801687Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.802038Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.802943Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.803083Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.805134Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.805585Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.807973Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.809004Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.809074Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2026-01-07T17:32:27.817939Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.817969Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.817990Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.818340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.819030Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.819188Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.825133Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.825953Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.829530Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.833029Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.833095Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:32:27.834283Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.834309Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.834333Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.849209Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:32:27.861291Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.861492Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.865150Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.865967Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.866196Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.873157Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.873232Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:32:27.878144Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.878172Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.878196Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:27.888230Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:32:27.890653Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:32:27.890860Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.893138Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:32:27.893918Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:27.894089Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:32:27.894165Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:32:27.894202Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:32:27.894259Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2026-01-07T17:32:27.930881Z :ReadSession INFO: Random seed for debugging is 1767807147930843 2026-01-07T17:32:28.281423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673890131810947:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:28.281660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:28.333015Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:28.356171Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673887523635458:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:28.356524Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:28.416308Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:28.660756Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:28.687008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:28.879838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:28.879955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:28.881151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:28.881213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:29.029859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:29.085385Z node 1 :HIVE WARN: 
hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:32:29.139481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:29.229387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:29.234589Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:29.248906Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:29.280578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:29.389032Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:29.565897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038bd/r3tmp/yandexOjKLyy.tmp 2026-01-07T17:32:29.565929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to in ... vents: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.175998Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.277292Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.277318Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.277329Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.277344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.277354Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.377894Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.377924Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.377935Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.377950Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.377960Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.479970Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.480004Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T17:32:58.480016Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.480034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.480046Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.529380Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_15472707017107325453_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-07T17:32:58.580232Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.580261Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.580272Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.580290Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.580303Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.617395Z :INFO: [/Root] [/Root] [2c32ad8a-964bfa14-fe20fdc9-3a539c48] Closing read session. Close timeout: 0.000000s 2026-01-07T17:32:58.617482Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-07T17:32:58.617527Z :INFO: [/Root] [/Root] [2c32ad8a-964bfa14-fe20fdc9-3a539c48] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16583 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:32:58.617637Z :NOTICE: [/Root] [/Root] [2c32ad8a-964bfa14-fe20fdc9-3a539c48] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:32:58.617681Z :DEBUG: [/Root] [/Root] [2c32ad8a-964bfa14-fe20fdc9-3a539c48] [dc1] Abort session to cluster 2026-01-07T17:32:58.618217Z :NOTICE: [/Root] [/Root] [2c32ad8a-964bfa14-fe20fdc9-3a539c48] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:32:58.622074Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_15472707017107325453_v1 grpc read done: success# 0, data# { } 2026-01-07T17:32:58.622118Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_15472707017107325453_v1 grpc read failed 2026-01-07T17:32:58.622151Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_15472707017107325453_v1 grpc closed 2026-01-07T17:32:58.622200Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_15472707017107325453_v1 is DEAD 2026-01-07T17:32:58.624329Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_15472707017107325453_v1 2026-01-07T17:32:58.624381Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7592673950261355473:2488] destroyed 2026-01-07T17:32:58.624438Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_15472707017107325453_v1 2026-01-07T17:32:58.625038Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592673950261355464:2481] disconnected. 2026-01-07T17:32:58.625086Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592673950261355464:2481] disconnected; active server actors: 1 2026-01-07T17:32:58.625108Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592673950261355464:2481] client user disconnected session shared/user_1_1_15472707017107325453_v1 2026-01-07T17:32:58.680767Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.680795Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.680807Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.680826Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.680837Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.783952Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.783987Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.784001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.784020Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.784035Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:32:58.889058Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:32:58.889089Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.889100Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:32:58.889118Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:32:58.889127Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:33:00.675302Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:00.675330Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:00.675361Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:33:00.681159Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:33:00.690560Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:33:00.691055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:00.691311Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:33:00.692232Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:33:00.692676Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:33:00.693089Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-07T17:33:00.693185Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:33:00.693256Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:33:00.693301Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-07T17:33:00.697051Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:33:00.697104Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:33:02.701185Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:02.701241Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:02.701299Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:33:02.722921Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:33:02.723374Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:33:02.732167Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:02.733157Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:33:02.733337Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:33:02.733405Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:33:02.733493Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath [GOOD] >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-07T17:31:21.547810Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-07T17:31:21.547872Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-07T17:31:21.548041Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-07T17:31:21.548069Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:6:2053] Successful handshake: owner# 800, generation# 1 2026-01-07T17:31:21.548159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-07T17:31:21.548196Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:3:2050] Commit generation: owner# 800, generation# 1 2026-01-07T17:31:21.548311Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-07T17:31:21.548336Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:6:2053] Commit generation: owner# 800, generation# 1 2026-01-07T17:31:21.548491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:21.548915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2069] 2026-01-07T17:31:21.549080Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:3:2050] Upsert description: path# /root/tenant 2026-01-07T17:31:21.549195Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: 
[1:3:2050] Subscribe: subscriber# [1:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.549367Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:44:2069] 2026-01-07T17:31:21.549397Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# /root/tenant 2026-01-07T17:31:21.549430Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.549539Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:45:2069] 2026-01-07T17:31:21.549565Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:9:2056] Upsert description: path# /root/tenant 2026-01-07T17:31:21.549598Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:9:2056] Subscribe: subscriber# [1:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:21.549666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2026-01-07T17:31:21.549724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2069] 2026-01-07T17:31:21.549780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2026-01-07T17:31:21.549818Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:44:2069] 2026-01-07T17:31:21.549856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:45:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2026-01-07T17:31:21.549916Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:45:2069] 2026-01-07T17:31:21.550025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2069] 2026-01-07T17:31:21.550117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:41:2069] 2026-01-07T17:31:21.550183Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:39:2069][/root/tenant] Set up state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:31:21.550242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:42:2069] 2026-01-07T17:31:21.550281Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:39:2069][/root/tenant] Ignore empty state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
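The subscriber trace above keeps repeating the same three decisions: an initial weak state is set up from empty (Version: 0) replica notifications, further empty notifications are ignored ("Ignore empty state"), and a later versioned notification flips the subscriber to a strong state ("Update to strong state"). The snippet below is a minimal, self-contained model of only that decision, added here for illustration; the struct, field names, and Apply function are assumptions and do not correspond to ydb's actual subscriber.cpp.

// Hypothetical sketch: a toy model of the state transitions visible in the log above.
// Everything here (names, fields, logic granularity) is assumed for illustration only.
#include <cstdint>
#include <iostream>
#include <string>

struct TNotifyState {
    bool Deleted = true;   // no path description known yet
    bool Strong = false;   // becomes true once a versioned description arrives
    uint64_t Version = 0;  // 0 corresponds to the "empty state" entries in the log
};

// React to one more replica notification arriving at the aggregating subscriber.
std::string Apply(TNotifyState& current, const TNotifyState& incoming) {
    if (!current.Strong && !incoming.Strong && incoming.Version == 0) {
        // The default-constructed state already plays the role of "Set up state";
        // additional empty notifications change nothing ("Ignore empty state").
        return "ignore empty state";
    }
    if (incoming.Strong && incoming.Version > current.Version) {
        current = incoming;  // mirrors "Update to strong state" with a real PathVersion
        return "update to strong state";
    }
    return "keep current state";
}

int main() {
    TNotifyState state;  // weak initial state: Deleted: 1, Strong: 0, Version: 0
    std::cout << Apply(state, {true, false, 0}) << "\n";  // empty notify -> ignored
    std::cout << Apply(state, {false, true, 1}) << "\n";  // versioned notify -> strong
}

Run as a plain C++14 program, this prints "ignore empty state" followed by "update to strong state", matching the order of decisions in the trace; it is a reading aid for the log, not a reimplementation of the scheme board.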
=========== !argsLeft.IsDeletion 2026-01-07T17:31:21.550515Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:36:2066], cookie# 0, event size# 103 2026-01-07T17:31:21.550558Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:31:21.550705Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:31:21.551169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2026-01-07T17:31:21.551228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:43:2069] 2026-01-07T17:31:21.551282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:40:2069] 2026-01-07T17:31:21.551351Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:39:2069][/root/tenant] Update to strong state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-01-07T17:31:22.009597Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-07T17:31:22.009678Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-07T17:31:22.009864Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-07T17:31:22.009895Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:3:2050] Commit generation: owner# 800, generation# 1 2026-01-07T17:31:22.009930Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-07T17:31:22.009953Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:6:2053] Successful handshake: owner# 900, generation# 1 2026-01-07T17:31:22.010115Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-07T17:31:22.010141Z node 3 
:SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:6:2053] Commit generation: owner# 900, generation# 1 2026-01-07T17:31:22.010235Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:31:22.010621Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2069] 2026-01-07T17:31:22.010650Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:3:2050] Upsert description: path# /root/tenant 2026-01-07T17:31:22.010741Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:3:2050] Subscribe: subscriber# [3:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:22.010884Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:44:2069] 2026-01-07T17:31:22.010916Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:6:2053] Upsert description: path# /root/tenant 2026-01-07T17:31:22.010967Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:6:2053] Subscribe: subscriber# [3:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:22.011084Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:45:2069] 2026-01-07T17:31:22.011104Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:9:2056] Upsert description: path# /root/tenant 2026-01-07T17:31:22.011136Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:9:2056] Subscribe: subscriber# [3:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:31:22.011204Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2026-01-07T17:31:22.011251Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2069] 2026-01-07T17:31:22.011304Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2026-01-07 ... 
94: [399:3:2050] Commit generation: owner# 910, generation# 1 2026-01-07T17:33:03.441682Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-07T17:33:03.441716Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [399:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-07T17:33:03.441912Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-07T17:33:03.441948Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [399:6:2053] Commit generation: owner# 910, generation# 1 2026-01-07T17:33:03.442080Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][399:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[399:24339059:0], [399:1099535966835:0], [399:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:33:03.442509Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2069] 2026-01-07T17:33:03.442551Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.442636Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:3:2050] Subscribe: subscriber# [399:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.442788Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:44:2069] 2026-01-07T17:33:03.442815Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.442857Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:6:2053] Subscribe: subscriber# [399:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.442993Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:45:2069] 2026-01-07T17:33:03.443020Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.443063Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:9:2056] Subscribe: subscriber# [399:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.443135Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2026-01-07T17:33:03.443193Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2069] 2026-01-07T17:33:03.443247Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2026-01-07T17:33:03.443290Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { 
Version: 0 }: sender# [399:44:2069] 2026-01-07T17:33:03.443334Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2026-01-07T17:33:03.443394Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:45:2069] 2026-01-07T17:33:03.443478Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2069] 2026-01-07T17:33:03.443554Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:41:2069] 2026-01-07T17:33:03.443607Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][399:39:2069][/Root/Tenant/table_inside] Set up state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:33:03.443674Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:42:2069] 2026-01-07T17:33:03.443719Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][399:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2026-01-07T17:33:03.728613Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-07T17:33:03.728666Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:3:2050] Successful handshake: owner# 910, generation# 1 2026-01-07T17:33:03.728758Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-07T17:33:03.728782Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:3:2050] Commit generation: owner# 910, generation# 1 2026-01-07T17:33:03.728817Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-07T17:33:03.728843Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-07T17:33:03.729008Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-07T17:33:03.729041Z node 401 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:994: [401:6:2053] Commit generation: owner# 910, generation# 1 2026-01-07T17:33:03.729161Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][401:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[401:24339059:0], [401:1099535966835:0], [401:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:33:03.729515Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:43:2069] 2026-01-07T17:33:03.729541Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.729602Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:3:2050] Subscribe: subscriber# [401:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.729702Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:44:2069] 2026-01-07T17:33:03.729720Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.729744Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:6:2053] Subscribe: subscriber# [401:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.729828Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:45:2069] 2026-01-07T17:33:03.729854Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:33:03.729879Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:9:2056] Subscribe: subscriber# [401:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-07T17:33:03.729922Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:3:2050] 2026-01-07T17:33:03.729959Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:43:2069] 2026-01-07T17:33:03.729995Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:6:2053] 2026-01-07T17:33:03.730025Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:44:2069] 2026-01-07T17:33:03.730056Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:9:2056] 2026-01-07T17:33:03.730084Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:45:2069] 2026-01-07T17:33:03.730134Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/Root/Tenant/table_inside Version: 0 }: sender# [401:40:2069] 2026-01-07T17:33:03.730182Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:41:2069] 2026-01-07T17:33:03.730220Z node 401 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][401:39:2069][/Root/Tenant/table_inside] Set up state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:33:03.730262Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:42:2069] 2026-01-07T17:33:03.730293Z node 401 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][401:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::ShouldSucceedOnSingleShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2026-01-07T17:25:34.042676Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-07T17:25:40.546297Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:44.320895Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:46.945414Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:48.579294Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:49.462605Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:50.042952Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:50.877705Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:51.834297Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-07T17:25:52.442370Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:53.977736Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:25:57.626839Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:06.594451Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:15.361526Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:25.582823Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:36.277393Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:38.613963Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:43.899092Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:47.576741Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:26:59.896290Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:27:16.002389Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:27:27.931628Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:27:32.024353Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:27:37.861977Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:35.061974Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-07T17:29:36.139418Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:38.037406Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:39.475339Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:40.580989Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:41.743837Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:44.359262Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:46.539135Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:48.577758Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-07T17:29:49.911268Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:51.079989Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:52.758094Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:53.747668Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:54.863718Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:29:56.189663Z node 86 :YQ_CONTROL_PLANE_STORAG ... L_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:01.383936Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:02.169823Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:02.991945Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:04.878839Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:05.710419Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:30:06.490296Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:27.614024Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-07T17:31:31.854873Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:32.973768Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:34.193195Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:35.573412Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:36.754586Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:43.706155Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:49.990380Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:51.789329Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:53.045563Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:54.381950Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:58.693866Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:31:59.900051Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:02.360097Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:03.804703Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:06.091376Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:07.207905Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:08.253533Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:10.435892Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-07T17:32:11.673275Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:12.806147Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:13.877730Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:14.932188Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:15.965956Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:17.089702Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:18.168532Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:19.283906Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:20.371452Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:21.501454Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:22.683445Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-07T17:32:23.925983Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_proxy/ut/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TExportToS3Tests::ExportStartTime >> ExternalBlobsMultipleChannels::ChangeExternalCount >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TExportToS3Tests::RebootDuringCompletion [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> ExternalBlobsMultipleChannels::Simple >> TExportToS3Tests::RebootDuringAbortion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2026-01-07T17:32:59.019596Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002729/r3tmp/tmpTy7v0l//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-07T17:32:59.059878Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:01.755996Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002729/r3tmp/tmpTy7v0l//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-07T17:33:01.757535Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:02.558860Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002729/r3tmp/tmpTy7v0l//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-07T17:33:02.560567Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:04.089053Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002729/r3tmp/tmpTy7v0l//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-07T17:33:04.137333Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:05.429261Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002729/r3tmp/tmpTy7v0l//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-07T17:33:05.435861Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> ExternalBlobsMultipleChannels::SingleChannel >> TestKinesisHttpProxy::GoodRequestPutRecords >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> ExternalBlobsMultipleChannels::WithCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2026-01-07T17:32:57.696511Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:32:57.696816Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:32:59.256299Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-07T17:32:59.303548Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:01.668455Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-07T17:33:01.795094Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:03.257586Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 4 2026-01-07T17:33:03.332330Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:04.569522Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 5 2026-01-07T17:33:04.577280Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:33:05.725070Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/002738/r3tmp/tmpj29pUz//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 6 2026-01-07T17:33:05.747464Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 
LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TestKinesisHttpProxy::TestRequestBadJson >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TExportToS3Tests::ReplicationExportWithStaticCredentials >> TestYmqHttpProxy::TestCreateQueue [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TestYmqHttpProxy::TestReceiveMessage >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData [GOOD] >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> 
TExportToS3Tests::ExportPartitioningSettings [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::ExportIndexTablePartitioningSettings |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::ReplicationExportWithStaticCredentials [GOOD] >> TExportToS3Tests::ReplicationExportWithOAuthCredentials >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> TExportToS3Tests::Checksums >> KqpPg::LongDomainName [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TExportToS3Tests::TablePermissions [GOOD] >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TExportToS3Tests::TopicExport >> TExportToS3Tests::ReplicationExportWithOAuthCredentials [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] >> TExportToS3Tests::ReplicationExportMultipleItems >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:10.776874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:10.776959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:10.776992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:10.777024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:10.777066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:10.777102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:10.777177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:10.777233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:10.778016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:10.778321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:10.880740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:10.880804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:10.896102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:10.896372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:10.896528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:10.902749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:10.903032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:10.903562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:10.903765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:10.905781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:10.905978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:10.907112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:10.907176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:10.907284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:10.907328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:10.907356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:33:10.907473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.060236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.061991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.062052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.062127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:11.062232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:33:11.970814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:33:11.971346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.971404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:33:11.972871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:11.973000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:11.973078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:11.973114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:33:11.973157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:11.974612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:11.974692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:11.974719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:11.974744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:33:11.974774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:11.974838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:33:11.975923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1389 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:11.975964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:11.976079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1389 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:11.976195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1389 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:33:11.977832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:11.977875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:11.977997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:11.978053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:11.978140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:11.978208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:11.978247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.978283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, 
operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:11.978329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:11.979335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:11.979397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:11.981103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.981222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.981344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.981392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:11.981485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:11.981522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:11.981568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:11.981609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:11.981656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:11.981714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 101 2026-01-07T17:33:11.981757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:11.981797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:11.981826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:11.981953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:11.983814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:11.983854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:743:2720] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:33:11.986779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: 
"modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:11.987021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:11.987425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2026-01-07T17:33:11.989535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:11.989740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:09.914696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:10.029247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:10.055162Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:10.055776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:10.055847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:10.356032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:10.356158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:10.427370Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807186967539 != 1767807186967543 2026-01-07T17:33:10.447204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:10.494515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:10.590514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:10.924150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:10.937670Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:11.053871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:11.398715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:33:11.612385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> 
KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 8857, MsgBus: 23648 2026-01-07T17:30:35.849476Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673404171750523:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:35.849543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:36.573353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:36.610048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:36.610181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:36.821528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:36.844847Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:36.870351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:36.897230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:37.207981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:37.208126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:37.208135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:37.208211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:38.028698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:40.843212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673404171750523:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:40.843300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:41.075260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673429941554950:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.076414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.077319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673429941554962:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.077394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673429941554963:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.077438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:41.082206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:41.100152Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673429941554966:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:30:41.259644Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673429941555017:2541] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9281, MsgBus: 14898 2026-01-07T17:30:42.670269Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673432834298100:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:42.793086Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:42.809264Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:42.936830Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:42.936908Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:42.940546Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:42.944898Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:42.948246Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673432834298039:2081] 1767807042630496 != 1767807042630499 2026-01-07T17:30:42.993038Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:43.297587Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:43.297608Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:43.297615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:43.297734Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:43.311444Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:43.749334Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:44.026571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:30:44.037771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:30:47.667829Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673432834298100:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:47.667917Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:47.820283Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673454309135408:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:47.820404Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:47.821641Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673454309135420:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:47.821702Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673454309135421:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:47.821985Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:47.825830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:47.871532Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673454309135424:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:30:47.940096Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673454309135475:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResou ... AD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:57.694765Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674011527914258:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:57.694865Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:57.706112Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:57.732879Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7592674011527914257:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:32:57.800603Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7592674011527914310:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:57.890278Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, 
gRPC: 62165, MsgBus: 24798 2026-01-07T17:33:03.602356Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7592674038585148510:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:03.602449Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:03.639129Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-07T17:33:03.794330Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7592674038585148478:2081] 1767807183600810 != 1767807183600813 2026-01-07T17:33:03.795020Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:03.827448Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:03.827587Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:03.846124Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:03.897675Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-07T17:33:03.969274Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:03.969300Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:03.969312Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:03.969429Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:04.616764Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:05.027071Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:08.603883Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592674038585148510:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:08.604005Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:09.886176Z node 11 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064354953143:2327], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.886384Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.886918Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064354953162:2330], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.886987Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064354953163:2331], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.887062Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.892498Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:09.910767Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592674064354953166:2332], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:33:09.988422Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592674064354953217:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:10.030656Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TExportToS3Tests::Checksums [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TExportToS3Tests::ChecksumsWithCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:10.576383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:10.576491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:10.576540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:10.576576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:10.576614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:10.576642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:10.576726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:10.576797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:10.577667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:10.577965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:10.668819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:10.668883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:10.684769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:10.685190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:10.685396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:10.692163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:10.692551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:10.693351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:10.693678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:10.696447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:10.696666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:10.697936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:10.698018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:10.698148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:10.698198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:10.698240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:10.698416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:10.859093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.860919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.861374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.861439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.861538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.861627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
CHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:12.440533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:12.440578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-07T17:33:12.440621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-07T17:33:12.442506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-07T17:33:12.442562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-07T17:33:12.442652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:33:12.442681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:12.442712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:33:12.442733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:12.442762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:33:12.442809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2151] message: TxId: 281474976710760 2026-01-07T17:33:12.442846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:12.442874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:33:12.442916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:33:12.442985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-07T17:33:12.445399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:33:12.445472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:33:12.445565Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-07T17:33:12.445676Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:792:2761], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:33:12.447832Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-07T17:33:12.447994Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:792:2761], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:33:12.448064Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:33:12.453590Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-07T17:33:12.453755Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:792:2761], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:33:12.453812Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-07T17:33:12.453950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:12.454005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:881:2839] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:12.454547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:12.454931Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 360us result status StatusSuccess 2026-01-07T17:33:12.455489Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TExportToS3Tests::TopicExport [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TExportToS3Tests::ReplicationExportMultipleItems [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::TopicWithPermissionsExport >> TExportToS3Tests::ReplicationExportGlobalConsistency >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> KeyValueGRPCService::SimpleWriteListRangeV2 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TestKinesisHttpProxy::TestRequestWithIAM >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TExportToS3Tests::CorruptedDyNumber >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:59.850487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:59.850573Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:59.850616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:59.850667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:59.850735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:59.850808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:59.850882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:59.850954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:59.851853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:59.852161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:59.956777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:59.956851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:59.987439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:59.987821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:59.988009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:59.995795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:59.996152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:59.996944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:59.997198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:59.999978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:00.000180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:00.001604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:00.001674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-07T17:33:00.001789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:00.001846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:00.001897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:00.002112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:00.166447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.167565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.167695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.167787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.167963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168167Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:00.168883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:12.141892Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.141973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.142007Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:12.142039Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-07T17:33:12.142075Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:12.142153Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-07T17:33:12.144330Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:12.144431Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-07T17:33:12.144499Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:12.146071Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-07T17:33:12.146183Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:33:12.146518Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 
State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-01-07T17:33:12.147151Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:12.147260Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805935 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:12.147329Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-07T17:33:12.147457Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-07T17:33:12.147538Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-07T17:33:12.147583Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:12.147637Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-07T17:33:12.147679Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:12.147732Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:12.147811Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:33:12.147852Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-07T17:33:12.147904Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:12.147950Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-07T17:33:12.147990Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-07T17:33:12.148082Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:12.148122Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-07T17:33:12.148173Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:33:12.148216Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 
18446744073709551615 2026-01-07T17:33:12.148848Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.150356Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:12.150397Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:12.150564Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:33:12.150689Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:12.150728Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:212:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-07T17:33:12.150773Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:212:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-07T17:33:12.151618Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.151710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.151757Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:12.151807Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:33:12.151867Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:12.152750Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.152836Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.152869Z node 6 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:12.152903Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:33:12.152953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:33:12.153042Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-07T17:33:12.153097Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2152] 2026-01-07T17:33:12.155159Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.156265Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:12.156374Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:12.156429Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:12.158103Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:33:12.158165Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [6:931:2888] TestWaitNotification: OK eventTxId 112 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2026-01-07T17:33:10.678930Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/00269a/r3tmp/tmpHLUJfU//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-07T17:33:10.712571Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream >> TExportToS3Tests::EncryptedExport >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpScanArrowFormat::AggregateCountStar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:12.308533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:12.308645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:12.308703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:12.308752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:12.308793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:12.308834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:12.309221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:12.309343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:12.310331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:12.310717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:12.406922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:12.406993Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:12.426141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:12.426474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:12.426681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:12.438395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:12.438736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:12.439529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:12.439751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:12.444198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:12.444420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:12.445682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:12.445756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:12.445926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:12.445981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:12.446031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:12.446207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:12.608904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:33:12.610468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.610946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.611051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.611132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.611206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.611285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:12.611392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
xId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:13.586321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:33:13.586585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.586652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:33:13.586704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:33:13.586743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:33:13.588760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.588844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:13.588890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:33:13.590830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.590884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.590939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:13.590994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:13.591169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:13.592921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:13.593095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:13.593437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:13.593561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:13.593612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:13.593878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:13.593936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:13.594130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:13.594210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:13.596236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:13.596298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:13.596523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:13.596574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:13.596961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.597027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:13.597145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:13.597189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:13.597231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:13.597281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:13.597323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:13.597368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:13.597409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:33:13.597445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:13.597513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:13.597551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:13.597618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:13.598284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:13.598397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:13.598457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:13.598511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:13.598556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:13.598641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:13.601877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:13.602487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:33:13.603880Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:13.605004Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:13.607967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:13.608360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:13.608489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" 
Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2026-01-07T17:33:13.609002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-07T17:33:13.610257Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:13.613158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:13.613412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:33:13.613976Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> TExportToS3Tests::ChecksumsWithCompression [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> TExportToS3Tests::ReplicationExportGlobalConsistency [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TExportToS3Tests::Changefeeds >> TExportToS3Tests::TopicsExport >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TExportToS3Tests::ReplicatedTableExport |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TExportToS3Tests::ReplicatedTableExport [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn |84.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.6%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |84.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TExportToS3Tests::TopicsExport [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::EncryptedExport [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TExportToS3Tests::TopicsWithPermissionsExport >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TExportToS3Tests::ExportTableWithUniqueIndex >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::Changefeeds [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:15.814373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:15.814465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:15.814507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:15.814539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:15.814573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:15.814608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:15.814697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:15.814765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:33:15.815570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:15.815834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:15.900380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:15.900439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:15.915060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:15.915353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:15.915491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:15.921899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:15.922248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:15.922982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:15.923249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:15.925835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:15.926047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:15.927158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:15.927236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:15.927380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:15.927420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:15.927460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:15.927620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.067171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.068942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.069021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.069102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.069177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.069276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
3Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:17.378012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:17.378078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-07T17:33:17.378120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-07T17:33:17.380270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.380324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-07T17:33:17.380417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:33:17.380455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:17.380491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:33:17.380518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:17.380560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:33:17.380617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:127:2151] message: TxId: 281474976710760 2026-01-07T17:33:17.380657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:33:17.380687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:33:17.380713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:33:17.380774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-07T17:33:17.382421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:33:17.382499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:33:17.382552Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-07T17:33:17.382656Z node 1 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:792:2761], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:33:17.384042Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-07T17:33:17.384170Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:792:2761], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:33:17.384223Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:33:17.385660Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-07T17:33:17.386062Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[1:792:2761], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:33:17.386194Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-07T17:33:17.386365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:17.386409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:881:2839] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:17.386920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:17.387168Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 261us result status StatusSuccess 2026-01-07T17:33:17.387725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 
1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:16.243044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:16.243114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:16.243146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:16.243172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:16.243209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:16.243234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:16.243314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:16.243362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:33:16.244038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:16.244249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:16.331679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:16.331739Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:16.348781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:16.349241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:16.349450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:16.357334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:16.357745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:16.358581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:16.358872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:16.370613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:16.370857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:16.372131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:16.372220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:16.372350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:16.372405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:16.372451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:16.372663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.529337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.530998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:16.531557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
46678944, txId: 102, path id: 38 2026-01-07T17:33:17.546727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.546768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:17.547274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:17.547377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:17.547413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:33:17.547461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:33:17.547497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:17.547575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:33:17.550660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:17.563975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1198 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:17.564036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:17.564195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1198 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:17.564329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1198 } } CommitVersion { Step: 5000039 TxId: 102 } debug: 
NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:33:17.565213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:17.565277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:17.565411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:17.565466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:17.565570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:17.565643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:17.565685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.565721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:17.565777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:17.568554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.569089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.569391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.569447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:17.569563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:17.569596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:17.569631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:17.569672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:17.569707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:17.569776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:33:17.569828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:17.569863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:17.569891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:17.570045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:17.575149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:17.575206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:801:2770] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:17.575794Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:17.576118Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 248us result status StatusSuccess 2026-01-07T17:33:17.576627Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ReplicatedTableExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:04.967397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:04.967485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.967522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:04.967558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:04.967603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:04.967646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:04.967705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.967772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:33:04.968611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:04.968904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:05.059874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:05.059929Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:05.075203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:05.075556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:05.075757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:05.081845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:05.082190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:05.082942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:05.083201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:05.086047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:05.086225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:05.087464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:05.087536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:05.087643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:05.087693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:05.087736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:05.087896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:05.229835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.230805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.230921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:05.231938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
4046678944, LocalPathId: 1], version: 8 2026-01-07T17:33:16.743437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:16.744305Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:16.744367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:16.744389Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:16.744418Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:33:16.744455Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:16.744537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:33:16.745467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:16.745517Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:16.745646Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:16.745813Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:33:16.750484Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 723 RawX2: 30064773780 } Origin: 72075186233409546 State: 2 TxId: 101 
Step: 0 Generation: 2 2026-01-07T17:33:16.750546Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:16.750702Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 723 RawX2: 30064773780 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:16.750769Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:16.750866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 723 RawX2: 30064773780 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:16.750937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:16.750978Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.751021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:16.751064Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:16.757542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:16.757737Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:16.757943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.758094Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.758424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:16.758478Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:16.758624Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:16.758668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:16.758719Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:16.758768Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:16.758815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:16.758891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:751:2728] message: TxId: 101 2026-01-07T17:33:16.758956Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:16.759010Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:16.759054Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:16.759213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:16.761013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:16.761077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [7:752:2729] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:16.761620Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:16.761869Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 282us result status StatusSuccess 2026-01-07T17:33:16.762355Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:17.983386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:17.983455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:17.983486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:17.983516Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:17.983541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:17.983575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:17.983622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:17.983683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:17.984281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:17.984479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:18.064559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:18.064619Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:18.079869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:18.080239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:18.080438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:18.090276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:18.090681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:18.091479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:18.091743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:18.095166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.095383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:18.096532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:18.096620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.096724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:18.096767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:18.096806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:18.096985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.251207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.255992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.256582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... HARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:18.853632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:18.853747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:18.855143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:18.855275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:18.855580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:18.855689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:18.855736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:18.855983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:18.856035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:18.856179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:18.856248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:18.858110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:18.858167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-07T17:33:18.858391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.858435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:18.858696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.858736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:18.858824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:18.858852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:18.858881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:18.858939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:18.858983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:18.859025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:18.859060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:33:18.859138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:18.859194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:18.859222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:18.859292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:18.859838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:18.859936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:18.859990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:18.860021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:18.860050Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:18.860150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:18.862401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:18.862787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767807198.863731 121433 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-07T17:33:18.864055Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:18.864845Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:18.867291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:18.867575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.867867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-07T17:33:18.868763Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:18.870901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:18.871090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-07T17:33:18.871567Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1767807198.871895 121433 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-07T17:33:18.874430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:18.874712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.874948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2026-01-07T17:33:18.876665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:18.876848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:58.779492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:58.779596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:58.779635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:58.779702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:58.779773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:58.779815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-07T17:32:58.779896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:58.779973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:58.780958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:58.781293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:58.891043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:58.891105Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:58.918402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:58.918826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:58.919013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:58.932111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:58.932511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:58.933568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:58.933871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:58.943187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:58.943445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:58.944797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:58.944869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:58.945019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:58.945086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:58.945140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:58.945357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:59.237234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.239898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.240001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.240106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.240185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.240266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.240354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:17.868183Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-07T17:33:17.868224Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:17.868530Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:17.868585Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:17.868610Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:17.868634Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-07T17:33:17.868657Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-07T17:33:17.868698Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-07T17:33:17.868741Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2153] 2026-01-07T17:33:17.872239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:17.872421Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:17.872492Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:17.872541Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:17.872583Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:17.872614Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-07T17:33:17.872641Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-07T17:33:17.875139Z node 6 :EXPORT DEBUG: 
schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:17.875276Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:33:17.875332Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1534:3326] TestWaitNotification: OK eventTxId 105 2026-01-07T17:33:17.876592Z node 6 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-07T17:33:17.881836Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:17.881903Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 105 2026-01-07T17:33:17.881981Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-07T17:33:17.882080Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:17.882227Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2026-01-07T17:33:17.882350Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:17.882399Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2026-01-07T17:33:17.882473Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-07T17:33:17.882601Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:17.887145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-07T17:33:17.887335Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.887504Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 
281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:17.892022Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:17.892302Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-105 2026-01-07T17:33:17.892465Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-07T17:33:17.892585Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-07T17:33:17.892658Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:17.892740Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-07T17:33:17.892903Z node 6 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-07T17:33:17.893053Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-07T17:33:17.895666Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:17.895813Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:17.895940Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:17.896018Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:17.896080Z node 6 :EXPORT DEBUG: 
schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:17.896123Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-07T17:33:17.896168Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-07T17:33:17.903199Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2026-01-07T17:33:17.903522Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:33:17.903616Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:33:17.904169Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:33:17.904265Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:33:17.904323Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1754:3542] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:18.303739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:18.303838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:18.303882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:18.303919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:18.303958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:18.303988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:18.304075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:18.304144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:33:18.305136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:18.305440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:18.401602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:18.401666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:18.418197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:18.418624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:18.418827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:18.425930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:18.426270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:18.426986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:18.427235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:18.430459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.430711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:18.431892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:18.431960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.432074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:18.432118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:18.432158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:18.432312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.577343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.578945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.579041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.579115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.579177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.579249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.579381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7:33:19.133227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:33:19.133253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:33:19.134895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.134939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:19.134973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:33:19.136584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.136625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.136661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:19.136710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:19.136807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:19.138428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:19.138588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:19.138930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:19.139080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:19.139126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:19.139368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:19.139421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:19.139547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:19.139604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:19.141403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:19.141451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:19.141645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:19.141688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:19.142008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.142057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:19.142145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:19.142179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:19.142220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:19.142261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:19.142296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:19.142339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:19.142375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:33:19.142456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:19.142523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:19.142563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:19.142607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:19.143325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:19.143429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:19.143477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:19.143516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:19.143552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:19.143635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:19.146868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:19.147521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767807199.148550 121567 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-07T17:33:19.148958Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:19.150022Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:19.153086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:19.153491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.153767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-07T17:33:19.154300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1767807199 seconds (20460 days, 56 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2026-01-07T17:33:19.155694Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:19.158649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1767807199 seconds (20460 days, 56 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:19.158918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1767807199 seconds (20460 days, 56 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:33:19.159604Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TExportToS3Tests::AuditCompletedExport >> ExternalBlobsMultipleChannels::Simple [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:10.359553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:10.467663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:10.483987Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:10.484351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:10.484386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:10.784125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:10.784241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:10.865776Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807187693803 != 1767807187693807 2026-01-07T17:33:10.877535Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:10.923377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:11.008700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:11.318832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:11.332706Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:11.435575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:11.742212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:958:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.742338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.742783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.743692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.743963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.748513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:11.875168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:972:2823], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:11.962479Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1030:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestKinesisHttpProxy::ListShards |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:10.508801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:10.614278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:10.631095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:10.631603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:10.631668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:10.925036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:10.925148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:10.999412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807187375632 != 1767807187375636 2026-01-07T17:33:11.014336Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:11.062327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:11.147603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:11.453976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:11.469191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:11.573427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:11.914478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:33:12.135214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.135332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1025:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.135381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.136090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.136211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.139247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:12.277868Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1028:2868], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:12.326453Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1086:2907] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:18.732782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:18.732868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:18.732913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:18.732966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:18.733002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:18.733029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:18.733111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:18.733177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:18.734154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:18.734427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:18.819063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:18.819128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:18.833485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:18.833846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:18.834024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:18.840027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:18.840304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:18.840854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:18.841080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:18.843337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.843499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:18.844381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:18.844430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:18.844510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:18.844546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:18.844581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:18.844691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:18.953252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.953971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:18.954752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ode 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-07T17:33:19.845604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:19.845639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:19.845740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:19.845791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:19.845862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:19.845975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:19.846007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.846032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:19.846060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:19.849067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:19.849331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:19.849390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:19.851576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:19.851675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:19.851730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.851780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:19.851985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.852125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:19.852166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-07T17:33:19.852254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-07T17:33:19.852279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-07T17:33:19.852346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-07T17:33:19.852370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-07T17:33:19.852403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-07T17:33:19.852618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.852640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:19.852685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:33:19.852716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:19.852749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:33:19.852768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:19.852790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-07T17:33:19.852862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:786:2752] message: TxId: 101 2026-01-07T17:33:19.852922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:19.852979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:19.853030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:19.853178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:19.853222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-07T17:33:19.853244Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-07T17:33:19.853276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:19.853300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-07T17:33:19.853316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-07T17:33:19.853379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:33:19.855510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:19.855545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:787:2753] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:19.856068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:19.856329Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 277us result status StatusSuccess 2026-01-07T17:33:19.856804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTestsWithReboots::AlterTable >> TestKinesisHttpProxy::CreateDeleteStream |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:04.560015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:04.560097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.560134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:04.560171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:04.560204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:04.560246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:04.560296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.560349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:04.561126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:04.561362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:04.658353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:04.658412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:04.681020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:04.681413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:04.681576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:04.690371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:04.690743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:04.691505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:04.691793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:04.697659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:04.697859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:04.699084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:04.699161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:04.699273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:04.699315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:04.699372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:04.699569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:04.883847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.884899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.885909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.886006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.886085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.886182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.886261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:19.152600Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-07T17:33:19.152644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-07T17:33:19.152720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:33:19.152769Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-07T17:33:19.152820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-07T17:33:19.152872Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-07T17:33:19.153642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.155152Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:19.155199Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:19.155415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:33:19.155550Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:19.155585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-07T17:33:19.155624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 41 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-07T17:33:19.156468Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.156547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.156586Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 
2026-01-07T17:33:19.156656Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-07T17:33:19.156717Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:19.157654Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.157738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.157769Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:19.157802Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-07T17:33:19.157837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:33:19.157914Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-07T17:33:19.157990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-07T17:33:19.160105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.160475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:19.160553Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:19.160602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:19.162183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:19.162265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:944:2891] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:19.162806Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2026-01-07T17:33:19.163095Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 321us result status StatusSuccess 2026-01-07T17:33:19.163995Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 39 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:06.107324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:06.107430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:06.107488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:06.107604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:06.107653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:06.107693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:06.107760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:06.107832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:06.108694Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:06.108980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:06.197009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:06.197074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:06.210430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:06.215163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:06.215353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:06.221855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:06.222137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:06.222871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:06.223110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:06.228519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:06.228686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:06.229707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:06.229757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:06.229884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:06.229919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:06.229963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:06.230178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:06.378264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.379948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:06.380521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6-01-07T17:33:19.120263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-07T17:33:19.120286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:33:19.120365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-07T17:33:19.123088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-07T17:33:19.123120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-07T17:33:19.123147Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-07T17:33:19.123638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-07T17:33:19.123765Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000044 FAKE_COORDINATOR: advance: minStep5000044 State->FrontStep: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000044 2026-01-07T17:33:19.124041Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000044, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:19.124126Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000044 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:19.124156Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000044, at schemeshard: 72057594046678944 2026-01-07T17:33:19.124232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-07T17:33:19.124282Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-07T17:33:19.124313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-07T17:33:19.124339Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-07T17:33:19.124356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-07T17:33:19.124406Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-07T17:33:19.124449Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:33:19.124475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-07T17:33:19.124512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-07T17:33:19.124535Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-01-07T17:33:19.124559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-07T17:33:19.124600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:33:19.124631Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-07T17:33:19.124662Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 20 2026-01-07T17:33:19.124683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-07T17:33:19.125211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.125303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.126587Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:19.126622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:19.126791Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:33:19.126900Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:19.126939Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-07T17:33:19.126974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 
43 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-07T17:33:19.127542Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.127605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.127628Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-07T17:33:19.127656Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 20 2026-01-07T17:33:19.127690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 8 2026-01-07T17:33:19.127972Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.128048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.128068Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-07T17:33:19.128092Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-07T17:33:19.128114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:33:19.128169Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-07T17:33:19.128191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-07T17:33:19.128491Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:33:19.128516Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:33:19.128575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-07T17:33:19.130317Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.131367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:19.131456Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-07T17:33:19.131491Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-07T17:33:19.131771Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:33:19.132818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:33:19.132849Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1497:3319] TestWaitNotification: OK eventTxId 106 >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TestKinesisHttpProxy::GoodRequestGetRecords >> TestYmqHttpProxy::TestSetQueueAttributes >> TSchemeShardTTLTests::TtlTiersValidation >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:10.534793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:10.652119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:10.672695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:10.673198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:10.673257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:11.003261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:11.003409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:11.096504Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807187560310 != 1767807187560314 2026-01-07T17:33:11.106067Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:11.152150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:11.257619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:11.553280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:11.567340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:11.672693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:11.967562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.967710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.967790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.968792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.969048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:11.973387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:12.111058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:12.177468Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23387, MsgBus: 11023 2026-01-07T17:30:29.779642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673377597350830:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:29.779745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:29.823555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:30:30.375823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:30.375924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:30.488154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:30.527617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:30.539202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:30.749355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:30.773443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:30.773467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:30.773473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:30.773545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:30.782189Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:31.698493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:31.712574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2026-01-07T17:30:34.739478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:34.780105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673377597350830:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:34.792330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:34.954785Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T17:30:34.967529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.037659Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T17:30:35.076557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673403367155476:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.076654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.077372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673403367155488:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.077423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673403367155489:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.077735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:35.082005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:35.103320Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673403367155492:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2026-01-07T17:30:35.217731Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673403367155543:2664] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2026-01-07T17:30:35.739927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.825439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:35.899524Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T17:30:35.901428Z node 1 :TX_DATASHARD ERROR: read_table_scan.cpp:460: TReadTableScan: undelivered event TxId: 281474976710670 2026-01-07T17:30:35.901484Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976710670 at tablet 72075186224037891 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710670] at 72075186224037891 while waiting for scan finish) | 2026-01-07T17:30:35.903753Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710670 at tablet 72075186224037891 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710670] at 72075186224037891 while waiting for scan finish) | 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2026-01-07T17:30:36.331606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.415936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.497687Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2026-01-07T17:30:36.907031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:36.978971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:37.031457Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2026-01-07T17:30:37.474640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:37.543518Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T17:30:37.548972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:37.615609Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2026-01-07T17:30:38.309349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__opera ... 942422Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:03.093810Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:03.093836Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:03.093847Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:03.093955Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:03.098970Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:03.681778Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:04.206667Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:04.216249Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:33:07.658693Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592674033982456581:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:07.658805Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:09.612323Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064047228495:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.612484Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.612950Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064047228507:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.613028Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592674064047228508:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.613552Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:09.620708Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:09.643416Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592674064047228511:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:09.744838Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592674064047228564:2545] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:09.793803Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:10.519820Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Trying to start YDB, gRPC: 12394, MsgBus: 29915 2026-01-07T17:33:12.730746Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592674076075731850:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:12.737024Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:12.825042Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:12.911932Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:12.919889Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7592674076075731741:2081] 1767807192679947 != 1767807192679950 2026-01-07T17:33:12.933821Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:12.933944Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:12.960764Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:13.030117Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:13.081503Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:13.081534Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:13.081547Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:13.081669Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:13.737200Z node 12 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:13.797505Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:13.803798Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:17.729736Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592674076075731850:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:17.729840Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:18.043060Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674101845536413:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:18.043157Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674101845536407:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:18.043386Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:18.044037Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674101845536422:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:18.044128Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:18.049398Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:18.065659Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674101845536421:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:33:18.138828Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674101845536474:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:18.188673Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TestKinesisHttpProxy::TestListStreamConsumers |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> TSchemeShardTTLTests::ConditionalErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:10.800803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:10.942275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:10.960683Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:10.961111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:10.961163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:11.285117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:11.285231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:11.355141Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807187786063 != 1767807187786067 2026-01-07T17:33:11.365685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:11.411606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:11.501293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:11.805923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:11.819270Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:11.925789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:12.234538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.234685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.234785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.235830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.236052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:12.240611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:12.375040Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:12.471721Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:20.540068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:20.343063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:20.343164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:20.343215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:20.343269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:20.343310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:20.343353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:20.343443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:20.343521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:20.344458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-07T17:33:20.344753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:20.436824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:20.436891Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:20.451680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:20.452021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:20.452164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:20.459758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:20.460163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:20.460895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:20.461231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:20.464026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:20.464213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:20.465244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:20.465317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:20.465432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:20.465477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:20.465520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:20.465711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:20.603211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.604899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605006Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:20.605452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
FO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:33:21.707803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:33:21.707852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:33:21.707892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-07T17:33:21.707943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:33:21.709559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:33:21.709642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:33:21.709675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:33:21.709709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:33:21.709743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-07T17:33:21.709808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:33:21.710538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1243 } } CommitVersion { Step: 250 TxId: 103 } 2026-01-07T17:33:21.710577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-07T17:33:21.710737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1243 } } CommitVersion { Step: 250 TxId: 103 } 
2026-01-07T17:33:21.710872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1243 } } CommitVersion { Step: 250 TxId: 103 } debug: NTableState::TProposedWaitParts operationId# 103:0 2026-01-07T17:33:21.713821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 947 RawX2: 4294970187 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:33:21.713881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-07T17:33:21.713998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 947 RawX2: 4294970187 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:33:21.714050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:21.714113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 947 RawX2: 4294970187 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:33:21.714169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:21.714217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.714244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:21.714284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-07T17:33:21.717576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:33:21.717662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:33:21.717790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.718052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.718298Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.718338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:33:21.718437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:33:21.718471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:21.718501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:33:21.718533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:21.718562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:33:21.718616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:810:2776] message: TxId: 103 2026-01-07T17:33:21.718649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:21.718680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:33:21.718718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:33:21.718814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:33:21.720345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:33:21.720388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:973:2909] TestWaitNotification: OK eventTxId 103 W0000 00:00:1767807201.720902 122222 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2026-01-07T17:33:21.724143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:21.724647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.724770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: 
"modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-07T17:33:21.725218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2026-01-07T17:33:21.727648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:21.727918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:33:11.661624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:11.760096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:33:11.780104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:33:11.780562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:33:11.780620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:33:12.040702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:12.040824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:12.116913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807188833977 != 1767807188833981 2026-01-07T17:33:12.126891Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:12.171774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:12.271914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:33:12.575801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:12.593401Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:12.719918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:13.110993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:13.111140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:13.111227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:13.112249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:13.112448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:13.116215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:13.251859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:33:13.344759Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:21.058187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:21.058306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.058354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:21.058388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:21.058432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:21.058476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:21.058561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.058628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:21.059592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:21.059924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:21.154808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:21.154875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-07T17:33:21.172673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:21.173038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:21.173233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:21.183068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:21.183484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:21.184276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:21.184542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:21.188039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.188246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:21.189412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:21.189490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.189616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:21.189663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:21.189725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:21.189898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.339984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.341960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.342046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.342137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.342230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:33:22.377320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:22.379035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:22.379129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:22.379168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:22.379202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:33:22.379252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:22.379340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:33:22.380629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1481 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:22.380676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:22.380822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1481 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:22.380972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1481 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:33:22.383002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2026-01-07T17:33:22.383057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:22.383210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:22.383299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:22.383436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:22.383520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.383566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.383623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:22.383675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:22.384887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:22.384995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:22.386676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.386796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.386893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.386930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:22.387056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:22.387102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:22.387146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:22.387196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:22.387262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:22.387344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 101 2026-01-07T17:33:22.387404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:22.387448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:22.387482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:22.387648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:22.392162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:22.392244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:743:2720] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:22.392880Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:22.393163Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 295us result status StatusSuccess 2026-01-07T17:33:22.393694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:21.229983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:21.230092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.230139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:21.230176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:21.230215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:21.230256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:21.230328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.230402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:21.231350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:21.231673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:21.316537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:21.316598Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:21.332294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:21.332679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:21.332847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:21.341011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:21.341396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:21.342172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:21.342453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:21.345277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.345488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:21.346681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:21.346756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.346876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:21.346921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:21.347006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:21.347188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.484869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.489930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.490557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:22.532955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:22.533035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:22.533066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:33:22.533099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:33:22.533142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:33:22.533218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:33:22.535987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:22.553940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1179 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:22.554004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:22.554198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1179 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:22.554361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1179 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:33:22.555706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.555767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:22.555925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.555990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:22.556082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.556153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.556235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.556290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:22.556346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:22.562114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.566717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.566961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.567033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:22.567180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:22.567248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.567294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:22.567329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.567377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:22.567455Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:784:2750] message: TxId: 102 2026-01-07T17:33:22.567525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.567565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:22.567599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:22.567734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:22.570014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:22.570074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:868:2826] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:22.570661Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:22.571005Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 301us result status StatusSuccess 2026-01-07T17:33:22.571558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> TestYmqHttpProxy::TestGetQueueAttributes >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |84.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:21.545575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:21.545658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.545693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:21.545728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:21.545760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:21.545798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:21.545881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.545945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:21.546700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:21.546973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:21.620092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:21.620172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:21.637284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:21.637607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:21.637765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:21.644983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:21.645353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:21.646031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:33:21.646299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:21.649241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.649440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:21.650544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:21.650604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.650710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:21.650750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:21.650788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:21.650934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.786739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.790997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.791069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.791163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.791277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.791358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 818 RawX2: 4294970080 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.776809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:22.776979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 818 RawX2: 4294970080 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.777044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:22.777129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 818 RawX2: 4294970080 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.777184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.777227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-07T17:33:22.778525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply 
complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.779270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.794197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.794247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:22.794372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.794429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:22.794493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:22.794571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.794625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.794661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:22.794703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:22.794734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:22.798074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.798294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.798366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-07T17:33:22.798435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-07T17:33:22.798474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: 
All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-07T17:33:22.798570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-07T17:33:22.798611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-07T17:33:22.800911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.801016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:22.801119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:22.801163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.801202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:22.801250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.801290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:22.801373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:33:22.801423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:22.801457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:22.801494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:22.801635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:22.801671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:22.803971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:22.804019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:846:2804] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:22.804518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:22.804796Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 291us result 
status StatusSuccess 2026-01-07T17:33:22.805370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSequence::CreateSequenceParallel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:21.862908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2026-01-07T17:33:21.863002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.863049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:21.863095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:21.863146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:21.863191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:21.863281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.863352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:21.864238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:21.864520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:21.945681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:21.945739Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:21.966197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:21.966533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:21.966715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:21.973086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:21.973424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:21.974170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:21.974402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:21.977130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.977347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:21.978465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:21.978537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.978670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:21.978719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:21.978764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:21.978929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.113306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:33:22.114586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.114949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.115010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.115063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.115125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.115183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:23.065057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:23.065144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:23.065178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:23.065214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:33:23.065266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:23.065349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:33:23.066554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1355 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:23.066594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:23.066743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1355 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:23.067165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 
72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1355 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:33:23.069403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:23.069463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:23.069611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:23.069684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:23.069840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 714 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:23.069932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:23.069978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.070033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:23.070086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:23.071403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:23.071490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:23.073320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.073468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.073596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2026-01-07T17:33:23.073668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:23.073773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:23.073832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:23.073886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:23.073920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:23.073961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:23.074037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 101 2026-01-07T17:33:23.074091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:23.074133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:23.074170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:23.074622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:23.076859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:23.076945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:743:2720] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:33:23.080608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:23.081128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.081506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2026-01-07T17:33:23.083747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:23.084070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-01-07T17:33:23.087464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:23.087708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.088085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2026-01-07T17:33:23.090570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:23.090814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 >> TSequence::CreateSequence >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTestsWithReboots::CopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:23.635189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-01-07T17:33:23.635289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:23.635328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:23.635364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:23.635398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:23.635426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:23.635497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:23.635561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:23.636353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:23.636611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:23.725382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:23.725432Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:23.740907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:23.741231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:23.741393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:23.747270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:23.747616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:23.748243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:23.748497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:23.750920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:23.751111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:23.752187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:23.752249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:23.752366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:23.752409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:23.752444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:23.752585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.919696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.923673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.923830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.923915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.923981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:33:23.924117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.924702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... thId: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:24.532543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:33:24.532811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.532878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:33:24.532969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:33:24.533028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:33:24.537987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.538080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:24.538135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:33:24.542685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.542749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.542816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:24.542873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:24.543055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:24.545456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:24.545614Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:24.545969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:24.546090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:24.546147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:24.546438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:24.546514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:24.546704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:24.546784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:24.549351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:24.549406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:24.549649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:24.549714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:24.550065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.550133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:24.550239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:24.550276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:24.550316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:24.550369Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:24.550415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:24.550459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:24.550499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:33:24.550538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:24.550613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:24.550651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:24.550684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:24.551318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:24.551431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:24.551481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:24.551531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:24.551577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:24.551666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:24.554810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:24.555384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:33:24.556896Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:24.558115Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:24.561804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" 
Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:24.562185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.562305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2026-01-07T17:33:24.562827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2026-01-07T17:33:24.564191Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:24.567484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:24.567739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:33:24.568266Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> TSchemeShardTTLTests::CheckCounters >> TestKinesisHttpProxy::ListShards [GOOD] >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] 
>> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> TestKinesisHttpProxy::ListShardsEmptyFields >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.609208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.609298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.609340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.609372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.609416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.609443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.609529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.609593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.610415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.610696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.702869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.702928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.721835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.722142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.722287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.729424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.729756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.730386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.730619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.732765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.732961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.734060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.734112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.734222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.734262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.734289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.734409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.857037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.857758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.857884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.857959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.858793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:27.453405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:33:27.453646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.453705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:33:27.453764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:33:27.453818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:33:27.455813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.455894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:27.455939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:33:27.463836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.463892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.463930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:27.463984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:27.464108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:27.465748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:27.465873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:27.466176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:27.466280Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:27.466324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:27.466565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:27.466625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:27.466801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:27.466857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:27.468412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:27.468452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:27.468625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:27.468671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:27.468999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.469070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:27.469177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:27.469214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:27.469253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:27.469312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:27.469354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:27.469401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:27.469440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 
2026-01-07T17:33:27.469527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:27.469603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:27.469645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:27.469694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:27.470399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:27.470490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:27.470531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:27.470561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:27.470593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:27.470658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:27.473372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:27.473759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:33:27.474655Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:27.475620Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:27.477835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:27.478148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.478235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2026-01-07T17:33:27.478641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2026-01-07T17:33:27.479754Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:27.482372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:27.482606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:33:27.483261Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.438986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.439071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.439108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.439139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.439174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.439220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.439291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.439353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.440101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.440346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.531173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.531252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.549689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.550018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.550218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.559345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.559768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.560587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.560865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.563886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.564093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.565298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.565376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.565503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.565551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.565601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.565771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.721343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:33:26.722379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.722923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.723543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:33:27.631163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.631214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:27.631641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:33:27.631735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:33:27.631766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:33:27.631795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:33:27.631827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:27.631887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:33:27.634401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:33:27.647122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 953 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-07T17:33:27.647176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:27.647362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 953 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-07T17:33:27.647501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 953 } } 
CommitVersion { Step: 5000040 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:33:27.648338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:33:27.648382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:27.648520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:33:27.648579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:27.648667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:33:27.648730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:27.648769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.648881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:27.648920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-07T17:33:27.658825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.659240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.659569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.659644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:33:27.659800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:33:27.659844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:33:27.659877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:33:27.659925Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:33:27.659962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:33:27.660025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 104 2026-01-07T17:33:27.660069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:33:27.660106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:33:27.660137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:33:27.660250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:27.662114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:33:27.662196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:849:2818] TestWaitNotification: OK eventTxId 104 2026-01-07T17:33:27.662854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:27.663116Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 270us result status StatusSuccess 2026-01-07T17:33:27.663625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:22.874613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:22.874712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:22.874754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:22.874787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:22.874822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:22.874855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:22.874927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:22.874996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:22.875814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:22.876077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:22.962150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:22.962215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:22.973923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:22.974205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:22.974336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:22.980277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:22.980585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:22.981190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.981448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:22.984397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:22.984632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:22.985694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:22.985752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:22.985879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:22.985922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:22.985965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:22.986130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.136826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.137887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.138921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.139017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
8Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409551, request: TableId: 43 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640220000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-07T17:33:27.401486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:27.402424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:33:27.403053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-07T17:33:27.403235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:27.403292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-07T17:33:27.403845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:27.404157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.404219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:27.404604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.404640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:33:27.417392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.417454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-07T17:33:27.418771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.418819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:27.419076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.419277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.419502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.419564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.220000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.419818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.419883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.220000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.419964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.419997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:27.425075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.426260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.426319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.222000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.426907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.426991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.222000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.427311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.427441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.222000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.427511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.495345Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2026-01-07T17:33:27.495541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row count 1 2026-01-07T17:33:27.495623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2026-01-07T17:33:27.495759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2026-01-07T17:33:27.495828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2026-01-07T17:33:27.495875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2026-01-07T17:33:27.495928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-07T17:33:27.495989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-07T17:33:27.496024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:27.496104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:33:27.496141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:33:27.496172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:27.496227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-07T17:33:27.496259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2026-01-07T17:33:27.496294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], 
pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2026-01-07T17:33:27.496370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2026-01-07T17:33:27.509119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.509258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-07T17:33:27.511703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:27.511879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:27.511936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.225000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:27.511998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.631216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.631302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.631353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.631388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.631425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.631449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.631504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.631564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.632321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.632569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.725950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.726002Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.739451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.743954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.744114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.750580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.750839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.751544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.751741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.754153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.754311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.755501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.755559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.755659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.755697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.755735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.755885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.896269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:33:26.897282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.897987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.898044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.898082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.898146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.898193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.898249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7:33:27.736531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.736571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:27.737063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:27.737143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:27.737176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:33:27.737206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:33:27.737236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:27.737288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:33:27.738638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1025 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:27.738668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:27.738762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1025 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:27.738841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1025 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-07T17:33:27.739498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at 
schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:27.739543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:27.739657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:27.739708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:27.739780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 711 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:27.739825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:27.739869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.739897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:27.739924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:27.741584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:27.741771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.742639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.742834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.742875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:27.742954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:27.742991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:27.743022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:27.743055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
102 ready parts: 1/1 2026-01-07T17:33:27.743084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:27.743145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:738:2715] message: TxId: 102 2026-01-07T17:33:27.743208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:27.743238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:27.743264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:27.743349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:27.744571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:27.744615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:797:2766] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:27.745147Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:27.745380Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 243us result status StatusSuccess 2026-01-07T17:33:27.745729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestYmqHttpProxy::TestTagQueue >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestYmqHttpProxy::TestDeleteQueue |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TestYmqHttpProxy::TestListQueues >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> 
TSequence::CreateTableWithDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:30.661919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:30.662015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:30.662059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:30.662096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:30.662134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:30.662174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:30.662241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:30.662303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:30.663132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:30.663432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:30.751078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:30.751134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:30.765811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:30.766123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:30.766296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:30.772460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:30.772791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:30.773553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:30.773770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:30.776403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:30.776581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:30.777603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:30.777672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:30.777761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:30.777798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:30.777842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:30.777964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:30.901685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.902935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:30.903318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... peration type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:31.478743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:31.478882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:33:31.479049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.479104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:33:31.479144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:33:31.479183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:33:31.480544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.480587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:31.480628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:33:31.482095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.482138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.482186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:31.482241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:33:31.482349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:33:31.483624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:33:31.483731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:33:31.483959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:31.484063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:31.484101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:31.484324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:33:31.484375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:33:31.484506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:31.484601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:31.485991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:31.486043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 
1] 2026-01-07T17:33:31.486177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:31.486216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:33:31.486489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.486541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:33:31.487208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:31.487252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:31.487298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:33:31.487340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:31.487382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:33:31.487417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:33:31.487448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:33:31.487474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:33:31.487539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:31.487569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:33:31.487613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:33:31.488065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:31.488136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:33:31.488182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:33:31.488216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:33:31.488246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:33:31.488314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:33:31.491597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:33:31.492126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:33:31.493473Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:33:31.494582Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:33:31.497464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:31.497825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.498208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2026-01-07T17:33:31.499227Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:33:31.502230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:31.502435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-07T17:33:31.502772Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardTTLTestsWithReboots::CreateTable |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |84.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:25.034850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:25.034948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:25.034994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:25.035027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:25.035065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:25.035104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:25.035185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:25.035263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:25.036068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:25.036345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:25.136899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:25.136975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:25.153826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:25.154161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:25.154330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:25.160635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:25.161036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:25.161754Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:25.162015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:25.164846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:25.165080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:25.166191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:25.166266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:25.166394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:25.166439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:25.166483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:25.166658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:25.313096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:33:25.314290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.314948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:25.315057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :33:31.950210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.950327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.950393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.951647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:31.951771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:31.951810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:31.951843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:31.951874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:31.951906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:31.951973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3118:4337] message: TxId: 101 2026-01-07T17:33:31.952022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:31.952102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:31.952142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:31.953244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-07T17:33:31.956357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:31.956424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:3119:4338] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:31.957020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:31.957281Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 294us result status StatusSuccess 2026-01-07T17:33:31.957963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 
72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767807211.958736 123945 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-07T17:33:31.961777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:31.961990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.962440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2026-01-07T17:33:31.966813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:31.967059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> TestKinesisHttpProxy::TestWrongStream >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> TSchemeShardTTLTestsWithReboots::MoveTable >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TestYmqHttpProxy::TestDeleteMessage >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:33.239603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:33.239680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:33.239717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:33.239748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:33.239779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:33.239814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:33.239873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:33.239940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:33.240695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:33.240947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2026-01-07T17:33:33.321510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:33.321561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:33.335031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:33.335258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:33.335395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:33.340413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:33.340643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:33.341178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:33.341377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:33.343490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:33.343631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:33.344453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:33.344513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:33.344595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:33.344626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:33.344664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:33.344757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:33.468111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.468750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.468819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.468878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.468937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.468994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.469498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:34.304696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:34.304773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:34.304807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:33:34.304841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:33:34.304894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:33:34.304997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:33:34.308464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:34.323017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:34.323065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:34.323234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:34.323357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:33:34.324224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:34.324266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:34.324463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:34.324525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:34.324600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:34.324660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:34.324696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:34.324725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:34.324757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:34.328374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:34.329473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:34.329747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:34.329804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:34.329913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:34.329963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:34.330002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:34.330032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:34.330065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:34.330138Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:784:2750] message: TxId: 102 2026-01-07T17:33:34.330187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:34.330219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:34.330250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:34.330355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:34.331861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:34.331908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:915:2837] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:34.332436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:34.332725Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 306us result status StatusSuccess 2026-01-07T17:33:34.333273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey >> TestYmqHttpProxy::TestUntagQueue >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:24.349479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:24.349589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:24.349640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:24.349683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:24.349720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:24.349753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:24.349823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:24.349905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:24.350751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:24.351050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:24.440901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:24.440996Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:24.457006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:24.457389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:24.457588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:24.464301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:24.464666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:24.465454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:24.465728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:24.468394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:24.468620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:24.469834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:24.469896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:24.470028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:24.470094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:24.470193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:24.470410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-07T17:33:24.619913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.620912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.621939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.622021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:33:35.485710Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2026-01-07T17:33:35.485743Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:33:35.487682Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.487726Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-07T17:33:35.487798Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:755:2731] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-07T17:33:35.488277Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:130:2154], Recipient [7:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:33:35.488319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:33:35.488365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.488405Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:35.488660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:33:35.488759Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:33:35.488784Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-07T17:33:35.488808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-07T17:33:35.488844Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-07T17:33:35.488867Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-07T17:33:35.488892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2026-01-07T17:33:35.488987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:827:2783] message: TxId: 102 2026-01-07T17:33:35.489061Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-07T17:33:35.489116Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:35.489164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 
102:0 2026-01-07T17:33:35.489310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:35.489360Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-07T17:33:35.489385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-07T17:33:35.489421Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:33:35.489450Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-07T17:33:35.489474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-07T17:33:35.489522Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:33:35.489552Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:3 2026-01-07T17:33:35.489577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:3 2026-01-07T17:33:35.489626Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:33:35.490456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.490498Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.490723Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [7:130:2154], Recipient [7:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:33:35.490757Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:33:35.490811Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:33:35.490858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:33:35.490931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:35.491143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.491169Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.491470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.491494Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.491525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.491568Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.491615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.491642Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.493824Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.494012Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:35.494039Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.495322Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:35.495413Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:827:2783] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-01-07T17:33:35.495555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:35.495593Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:935:2883] 2026-01-07T17:33:35.495751Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:937:2885], Recipient [7:130:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:33:35.495779Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:33:35.495801Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-07T17:33:35.495859Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:35.498229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1018:2965], Recipient [7:130:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:33:35.498297Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:33:35.498445Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:35.498713Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 266us result status StatusPathDoesNotExist 2026-01-07T17:33:35.498867Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000039, drop txId: 102" Path: "/MyRoot/Table" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TExportToS3Tests::IndexMaterializationDisabled >> TBlobStorageProxyTest::TestBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:28.370459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:28.370548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:28.370591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:28.370629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:28.370667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:28.370694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:28.370775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:33:28.370845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:28.371659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:28.371932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:28.462796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:28.462845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:28.477459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:28.477805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:28.477996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:28.485951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:28.486317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:28.487046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:28.487312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:28.490023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:28.490229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:28.491409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:28.491473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:28.491599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:28.491645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:28.491686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:28.491846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:28.630471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.634997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:28.635751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409608, partId: 0 2026-01-07T17:33:35.650582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409608 TxId: 103 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:33:35.651647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.655394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.655520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.655573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.655665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.655713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:33:35.655815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:33:35.655845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:35.655873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:33:35.655900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:35.655940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:33:35.656006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3118:4337] message: TxId: 103 2026-01-07T17:33:35.656051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:33:35.656099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:33:35.656132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:33:35.657100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-07T17:33:35.659181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:33:35.659267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4399:5554] TestWaitNotification: OK eventTxId 103 2026-01-07T17:33:35.659852Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:35.660054Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 233us result status StatusSuccess 2026-01-07T17:33:35.660494Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 
ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-07T17:33:35.663743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:35.663895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.664232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2026-01-07T17:33:35.668296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:35.668481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , 
status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:33:35.668754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:33:35.668787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:33:35.669158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:33:35.669226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:33:35.669257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4753:5907] TestWaitNotification: OK eventTxId 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:35.148566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:35.148807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:35.148856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:35.148886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:35.148918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:35.148969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:35.149044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:35.149118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:35.149901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:35.150173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2026-01-07T17:33:35.219365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:35.219413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:35.233797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:35.234107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:35.234286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:35.240388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:35.240722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:35.241399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:35.241625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:35.244018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:35.244206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:35.245260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:35.245325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:35.245432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:35.245472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:35.245514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:35.245671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.373834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.374735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:33:35.374843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.374915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.374993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.375731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-07T17:33:36.239730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:36.239775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:36.239917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:36.239963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:36.240084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:36.240203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:36.240238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.240270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:36.240304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:36.253767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:36.254311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:36.254388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:36.260484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:36.260682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:36.260793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.260892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:36.261816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.262154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-07T17:33:36.262225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-07T17:33:36.262348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-07T17:33:36.262386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-07T17:33:36.262426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-07T17:33:36.262461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-07T17:33:36.262506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-07T17:33:36.263142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.263191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:36.263274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:33:36.263318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:36.263381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:33:36.263412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:36.263440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-07T17:33:36.263513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:786:2752] message: TxId: 101 2026-01-07T17:33:36.263558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:33:36.263609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:36.263645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:36.263793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:36.263842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-07T17:33:36.263867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-07T17:33:36.263931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:36.263961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-07T17:33:36.264003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-07T17:33:36.264107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:33:36.268645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:36.268711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:787:2753] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:36.269364Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:36.269705Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 333us result status StatusSuccess 2026-01-07T17:33:36.270363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:17.099901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:17.099971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:17.100006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:17.100033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:17.100062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:17.100095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:17.100142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:17.100228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:33:17.100951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:17.101170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:17.176004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:17.176062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:17.189693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:17.190007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:17.190177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:17.196059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:17.196404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:17.197205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:17.197453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:17.200808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:17.201032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:17.202226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:17.202305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:17.202424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:17.202473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:17.202516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:17.202709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:17.386987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.387972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.388979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.389067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:17.389160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
sole configs 2026-01-07T17:33:22.610927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:24.056369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0377 2026-01-07T17:33:24.056517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0681 2026-01-07T17:33:24.088328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-07T17:33:24.088538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:33:24.088614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:24.088738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:33:24.088798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-07T17:33:24.088830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:24.088889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-07T17:33:24.102534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:33:26.808405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0104 2026-01-07T17:33:26.808526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0175 2026-01-07T17:33:26.839433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-07T17:33:26.839634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:33:26.839751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: 
TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:26.839939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:33:26.840029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-07T17:33:26.840068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:26.840144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-07T17:33:26.852862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:33:29.486046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0104 2026-01-07T17:33:29.486136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0175 2026-01-07T17:33:29.521133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-07T17:33:29.521288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:33:29.521344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:29.521437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:33:29.521495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-07T17:33:29.521538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:29.521586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: 
SplitByLoadNotEnabledForTable 2026-01-07T17:33:29.533118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:33:32.952556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0037 2026-01-07T17:33:32.952660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0049 2026-01-07T17:33:32.993629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-07T17:33:32.993799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:33:32.993865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:32.993976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:33:32.994037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-07T17:33:32.994070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:32.994108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-07T17:33:33.004476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:33:36.454953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-07T17:33:36.455105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:36.455474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:36.455795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409547, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 40 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 
BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-07T17:33:36.456412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:36.457459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:36.457531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:36.466782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:36.467060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:36.467158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2026-01-07T17:33:36.467233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:24.618477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:24.618579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:24.618621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:24.618663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:24.618708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:24.618739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:24.618788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:24.618858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:24.619722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:24.620027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:24.706906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:24.706968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:24.721892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:24.722293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:24.722513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:24.728685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:24.728996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:24.729769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:24.729967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:24.732680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:24.732842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:24.734033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:24.734082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:24.734175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:24.734215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:24.734291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:24.734432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:24.868672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.869633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.869749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.869834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.869930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:24.870656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Coordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1453 } } CommitVersion { Step: 5000050 TxId: 114 } debug: NTableState::TProposedWaitParts operationId# 114:0 2026-01-07T17:33:36.573322Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 114 2026-01-07T17:33:36.574663Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1458:3393], Recipient [7:130:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:33:36.574705Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:33:36.574731Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:33:36.575040Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269551620, Sender [7:1395:3338], Recipient [7:130:2154]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1395 RawX2: 30064774410 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-07T17:33:36.575096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5280: StateWork, processing event TEvDataShard::TEvSchemaChanged 2026-01-07T17:33:36.575219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1395 RawX2: 30064774410 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-07T17:33:36.575266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2026-01-07T17:33:36.575446Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1395 RawX2: 30064774410 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-07T17:33:36.575528Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:36.575637Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1395 RawX2: 30064774410 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-07T17:33:36.575720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:36.575779Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.575831Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:36.575890Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 114:0 129 -> 240 2026-01-07T17:33:36.576091Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:33:36.576902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.577492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-07T17:33:36.577547Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.578741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-07T17:33:36.578780Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.580442Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.580495Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.580670Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.580721Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.580763Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2026-01-07T17:33:36.580880Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1395:3338] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2026-01-07T17:33:36.581040Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:130:2154], Recipient [7:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:33:36.581082Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:33:36.581148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.581198Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2026-01-07T17:33:36.581347Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:33:36.581382Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-01-07T17:33:36.581438Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-07T17:33:36.581519Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 
2026-01-07T17:33:36.581571Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-07T17:33:36.581620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2026-01-07T17:33:36.581690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:792:2758] message: TxId: 114 2026-01-07T17:33:36.581752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-07T17:33:36.581810Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 114:0 2026-01-07T17:33:36.581860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 114:0 2026-01-07T17:33:36.582078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-07T17:33:36.584201Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:33:36.584321Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:792:2758] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2026-01-07T17:33:36.584512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:33:36.584574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1425:3360] 2026-01-07T17:33:36.584813Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1427:3362], Recipient [7:130:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:33:36.584849Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:33:36.584873Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2026-01-07T17:33:36.586071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [7:1467:3402], Recipient [7:130:2154]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2026-01-07T17:33:36.586140Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:33:36.589085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:33:36.589366Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2026-01-07T17:33:36.589935Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2026-01-07T17:33:36.590219Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:33:36.593259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:36.593554Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2026-01-07T17:33:36.593628Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 5459, MsgBus: 21167 ... waiting for SysViewsRoster update finished 2026-01-07T17:31:31.362300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:31.553242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:31:31.574656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:31:31.575130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:31:31.575187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:31:31.903387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:31.903548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:32.007643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807087973366 != 1767807087973370 2026-01-07T17:31:32.026419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:32.078167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:32.224068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:31:32.717894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:32.717941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:32.717970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:32.718238Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:32.729730Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:33.167914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:33.270622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:33.570986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.001499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:34.377623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.619921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.620158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.622448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.622563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:35.667819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:35.850486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:36.233725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:36.566023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:36.860164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:37.186768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:37.529773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:37.923316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:38.359246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:38.359442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:38.359830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:38.359918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:38.360017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:38.366808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:38.583998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2801:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:31:38.647452Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2861:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:41.026824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table. ... 22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem, At lambda
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 27184, MsgBus: 6270 2026-01-07T17:33:28.001507Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592674145617078309:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:28.001552Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:28.026542Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:28.119740Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:28.119833Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:28.120864Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:28.153284Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:28.249610Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:28.249635Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:28.249643Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:28.249755Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:28.290271Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:28.917886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:28.927432Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:28.935215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:29.007733Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:29.011006Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:29.180713Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:29.253517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.463119Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674162796949328:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.463260Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.463652Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674162796949338:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.463722Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.553919Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.594192Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.628651Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.661758Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.693728Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.728664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.763900Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.816336Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:32.916874Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674162796950206:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.917011Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.917300Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674162796950211:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.917341Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674162796950212:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.917427Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:32.920756Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:32.935238Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592674162796950215:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:33:33.001880Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592674145617078309:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:33.001997Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:33.021942Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592674167091917565:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |84.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |84.7%| [LD] {RESULT} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::IndexMaterializationDisabled [GOOD] >> TExportToS3Tests::UserSID >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TExportToS3Tests::IndexMaterialization >> TBlobStorageProxyTest::TestEmptyDiscover >> TestYmqHttpProxy::TestListQueues [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> KqpResultSetFormats::ArrowFormat_EmptyBatch >> TExportToS3Tests::UserSID [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> TExportToS3Tests::TransferExportNoConnString >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> KqpResultSetFormats::ArrowFormat_Simple >> KqpScanArrowFormat::AllTypesColumnsCellvec >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> TExportToS3Tests::IndexMaterialization [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TExportToS3Tests::IndexMaterializationGlobal >> TestKinesisHttpProxy::TestWrongStream2 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TExportToS3Tests::TransferExportNoConnString [GOOD] >> TReplicationTests::Disabled >> TReplicationTests::Create >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TExportToS3Tests::TransferExportWithConnString >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> 
TExportToS3Tests::IndexMaterializationGlobal [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch >> TExportToS3Tests::IndexMaterializationGlobalAsync >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestKinesisHttpProxy::ListShardsTimestamp >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestKinesisHttpProxy::TestCounters >> TExportToS3Tests::TransferExportWithConnString [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> TExportToS3Tests::TransferExportWithConsumer >> TExportToS3Tests::SchemaMapping >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> LocalTableWriter::ApplyInCorrectOrder >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> TExportToS3Tests::IndexMaterializationGlobalAsync [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::TransferExportWithConsumer [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 >> TExportToS3Tests::DisableAutoDropping >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs >> TExportToS3Tests::SchemaMapping [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::ConsistencyLevel >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TExportToS3Tests::SchemaMappingEncryption >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2026-01-07T17:24:55.571228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592671943004221461:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:24:55.572044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:24:56.784378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:24:56.897960Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:24:56.899102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:24:57.047095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:24:57.047674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:24:57.363619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:24:57.802725Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.140788s 2026-01-07T17:24:57.803157Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592671943004221320:2082] 1767806695550228 != 1767806695550231 2026-01-07T17:24:57.803287Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:24:57.803317Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.141391s 2026-01-07T17:24:57.840336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:24:57.870830Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-07T17:24:57.871313Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-07T17:24:57.871353Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-07T17:24:57.871428Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-07T17:24:57.872478Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-07T17:24:57.872488Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-07T17:24:57.872504Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-07T17:24:57.872510Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-07T17:24:57.902507Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-07T17:24:57.902530Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-07T17:24:57.978632Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.023149s 2026-01-07T17:24:58.005332Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:24:58.005368Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:24:58.017800Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:24:58.018372Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 
2026-01-07T17:24:58.478509Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:24:58.478598Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:24:58.478659Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:24:58.478678Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:24:58.611681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:24:58.611703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:24:58.611709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:24:58.611789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:25:00.569013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592671943004221461:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:25:00.577409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:25:00.868100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:25:00.881297Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.881370Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.881379Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.881403Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.889139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:25:00.945205Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# MakeDirectoryRequest, traceId# 01kecqv3cffttrc7652cctt6t3, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43658, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:25:00.952018Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.952075Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.952086Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.952120Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.961475Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.961571Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 
2026-01-07T17:25:00.961585Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.961615Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.968354Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# CreateVolumeRequest, traceId# 01kecqv3d5684g1kt5yhey8g16, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43658, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:25:00.971295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSolomonVolume, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp:380) 2026-01-07T17:25:00.977149Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.977293Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:00.977305Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T17:25:00.977341Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T17:25:01.105704Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:25:01.127290Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01kecqv3j62drbex9xkfczh39q, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43658, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef Wait iteration# 0 2026-01-07T17:25:01.213585Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ExecuteTransactionRequest, traceId# 01kecqv3mg6gk4ff1hvkec9hc2, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43658, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:25:01.257656Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:25:01.267524Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2026-01-07T17:25:01.267814Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:68: KeyValue# 72075186224037888 TTxInit flat BuildScheme 2026-01-07T17:25:01.270721Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2026-01-07T17:25:01.271196Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:25:01.281398Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:25:01.282026Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 
Channel# 2 Status# OK} 2026-01-07T17:25:01.300562Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-07T17:25:01.300858Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-07T17:25:01.300945Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event ... xy.cpp:641: Got grpc request# DescribeVolumeRequest, traceId# 01kecrathn740951hkm1715xmm, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35806, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:33:36.192562Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2026-01-07T17:33:36.192572Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037888 TTxInit flat ReadDb Tree 2026-01-07T17:33:36.192771Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2026-01-07T17:33:36.196918Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.197048Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.197101Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.197141Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.197169Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-07T17:33:36.197193Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-07T17:33:36.197228Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.197253Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.197558Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: 
KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.197589Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.197611Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-07T17:33:36.197633Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.197718Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-07T17:33:36.197754Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 4 Marker# KV50 2026-01-07T17:33:36.197770Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-07T17:33:36.197778Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 3 Marker# KV50 2026-01-07T17:33:36.197786Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-07T17:33:36.197794Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 2 Marker# KV50 2026-01-07T17:33:36.197804Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-07T17:33:36.197813Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 1 Marker# KV50 2026-01-07T17:33:36.197848Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037889 SendCutHistory Marker# KV51 2026-01-07T17:33:36.197911Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Execute 2026-01-07T17:33:36.198314Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.198376Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.198429Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-07T17:33:36.198467Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 
2026-01-07T17:33:36.198514Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.198538Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.198561Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.198590Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.198763Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-07T17:33:36.198787Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-07T17:33:36.198819Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-07T17:33:36.198862Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-07T17:33:36.198931Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-07T17:33:36.198949Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 4 Marker# KV50 2026-01-07T17:33:36.198959Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-07T17:33:36.198967Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 3 Marker# KV50 2026-01-07T17:33:36.198978Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-07T17:33:36.198985Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2026-01-07T17:33:36.198994Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-07T17:33:36.199001Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2026-01-07T17:33:36.199020Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2026-01-07T17:33:36.199079Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: 
KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Execute 2026-01-07T17:33:36.200839Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Complete 2026-01-07T17:33:36.201105Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Complete 2026-01-07T17:33:36.203731Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# DropVolumeRequest, traceId# 01kecratjbcpvh7fj4qkr1gw8d, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35806, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:33:36.213382Z node 65 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSolomonVolume, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp:184) 2026-01-07T17:33:36.306071Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01kecratnh1bnm0020j38jepkk, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35806, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T17:33:36.307511Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-07T17:33:36.307588Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037889 not found 2026-01-07T17:33:36.308776Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-07T17:33:36.309836Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037888 not found |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/keyvalue/ut/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs [GOOD] >> TSchemeShardExportToFsTests::FsExportWithMultipleTables >> TExportToS3Tests::DisableAutoDropping [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> TExportToS3Tests::DecimalOutOfRange |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |84.9%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |84.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> TestYmqHttpProxy::TestListDeadLetterSourceQueues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2026-01-07T17:33:43.995403Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674212594112825:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:43.995444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:44.207984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:44.234487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:44.234617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:44.318644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:44.320129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:44.440707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:44.564404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:44.564427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:44.564450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:44.564518Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:44.804229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:44.810729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:44.814795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:44.907785Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handshake: worker# [1:7592674216889080908:2485] 2026-01-07T17:33:44.908144Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: 
OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:33:44.908464Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:33:44.908524Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Send handshake: worker# [1:7592674216889080908:2485] 2026-01-07T17:33:44.909799Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:33:44.915558Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-07T17:33:44.915723Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-07T17:33:44.915956Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592674216889081003:2546] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:33:44.916021Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:33:44.916114Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592674216889081003:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T17:33:44.918486Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592674216889081003:2546] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:33:44.918555Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:33:44.918621Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-07T17:33:44.918996Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T17:33:44.919424Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-07T17:33:44.919542Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2026-01-07T17:33:44.919714Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592674216889081003:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T17:33:44.921405Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592674216889081003:2546] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:33:44.921470Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:33:44.921514Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592674216889081000:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } 2026-01-07T17:33:45.004718Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> 
TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> TestKinesisHttpProxy::TestWrongRequest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TReplicationTests::Disabled [GOOD] >> TReplicationTests::CreateSequential >> TestKinesisHttpProxy::TestCounters [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> HashShuffle::BackPressureInMemoryLoad [GOOD] >> HashShuffle::BackPressureWithSpilling [GOOD] >> HashShuffle::BackPressureWithSpillingLoad |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] >> TExportToS3Tests::AuditCancelledExport >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] |84.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |84.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestKinesisHttpProxy::ListShardsToken >> TExportToS3Tests::IndexMaterializationTwoTables >> TExportToS3Tests::DecimalOutOfRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:36.845369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:36.845448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:36.845494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:36.845531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:36.845573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:36.845623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:36.845681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:36.845746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:36.846574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:36.846851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:36.916971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:36.917024Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:36.930541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:36.930810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:36.930980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:36.940351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:36.940688Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:36.941384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:36.941641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:36.944336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:36.944513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:36.945673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:36.945747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:36.945891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:36.945937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:36.945975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:36.946142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:37.066500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.067954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068183Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.068365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], version: 1 2026-01-07T17:33:48.764877Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 4 2026-01-07T17:33:48.764970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/2, is published: true 2026-01-07T17:33:48.765681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2026-01-07T17:33:48.765825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72057594037968897 2026-01-07T17:33:48.765883Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-07T17:33:48.766261Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-07T17:33:48.766495Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409548 2026-01-07T17:33:48.766601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2026-01-07T17:33:48.766827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1813: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 281474976710758, partId# 0, prevPartId# 1, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "1" CopyFromTable: "/MyRoot/Table2" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true, newTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "0" CopyFromTable: "/MyRoot/Table1" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true 2026-01-07T17:33:48.766878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-07T17:33:48.767087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 
72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-07T17:33:48.767140Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-07T17:33:48.767291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-07T17:33:48.767360Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-07T17:33:48.767453Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-07T17:33:48.767553Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:1 2 -> 3 2026-01-07T17:33:48.768270Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-07T17:33:48.768435Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409549 2026-01-07T17:33:48.768958Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-07T17:33:48.769005Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-07T17:33:48.769115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-07T17:33:48.769160Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-07T17:33:48.769222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 
2026-01-07T17:33:48.769296Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:0 2 -> 3 2026-01-07T17:33:48.772945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:48.773201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:48.773351Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:48.776718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:48.776949Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-07T17:33:48.777452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-07T17:33:48.777851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-07T17:33:48.777937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:1 ProgressState at tablet# 72057594046678944 2026-01-07T17:33:48.778030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:1 Propose modify scheme on dstDatashard# 72075186233409548 idx# 72057594046678944:4 srcDatashard# 72075186233409547 idx# 72057594046678944:2 operationId# 281474976710758:1 seqNo# 2:3 at tablet# 72057594046678944 2026-01-07T17:33:48.779112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-07T17:33:48.779165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2026-01-07T17:33:48.779215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409549 idx# 72057594046678944:3 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:4 at tablet# 72057594046678944 2026-01-07T17:33:48.788792Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:4 msg type: 269549568 2026-01-07T17:33:48.788993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-07T17:33:48.789088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, 
TxId: 281474976710758, partId: 1, tablet: 72075186233409548 2026-01-07T17:33:48.789123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409547 2026-01-07T17:33:48.789354Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:3 msg type: 269549568 2026-01-07T17:33:48.789437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-07T17:33:48.789499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409549 2026-01-07T17:33:48.789526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> TExportToS3Tests::CorruptedDecimalValue >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:31.023859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:31.023947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:31.023987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:31.024017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:31.024048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:31.024083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:31.024153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:31.024213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:31.025013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:31.025280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:31.102557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:31.102600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:31.117408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:31.117807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:31.117994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:31.124228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:31.124566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:31.125270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:31.125551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:31.127820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:31.128020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:31.128897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:31.128988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:31.129110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:31.129151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:31.129186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:31.129315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:31.272599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:33:31.273549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.273710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.273795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.273869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.273930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:31.274679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 2026-01-07T17:33:50.361269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:50.361382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:50.361456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:33:50.361718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:33:50.361887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:50.373897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:50.373958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:33:50.374223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:50.374286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:33:50.374866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.374926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:33:50.375656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:50.375776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:33:50.375830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:33:50.375877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:33:50.375923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:50.376006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:33:50.378174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1197 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:50.378228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:50.378404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1197 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:33:50.378538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1197 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-07T17:33:50.379637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:50.379697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:50.379882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:50.379948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:50.380031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:50.380111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:50.380153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.380186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:50.380227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:50.389810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:33:50.389972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.390255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.390377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.390477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:50.390600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:50.390638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:50.390671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:50.390703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:50.390735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:50.390854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:33:50.390905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:50.390939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:50.390969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:50.391117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:50.392818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:50.392867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1017:2969] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:50.393278Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:50.393372Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__conditional_erase.cpp:398: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2026-01-07T17:33:50.394971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:50.395127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:50.395182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.050500Z, at schemeshard: 72057594046678944 2026-01-07T17:33:50.395238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> HashShuffle::BackPressureWithSpillingLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-07T17:32:58.186746Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674017632309451:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:58.187141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:58.679765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:58.679879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:58.707575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:58.793786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:58.859759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:58.889091Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674017632309408:2081] 1767807178166834 != 1767807178166837 2026-01-07T17:32:59.090070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:59.109079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:59.109122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:59.109137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:59.109232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:59.205146Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:59.476979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:59.914001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:59.918783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:32:59.921089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:00.173721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.411413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.496235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.805340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:33:00.849201Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.956803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.030509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.096262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.173758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.218127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.259125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.211946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674017632309451:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:03.224363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:03.231542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674039107147560:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.231656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674039107147570:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.231753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.237228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674039107147575:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.237326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.238491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.260533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-01-07T17:33:03.263090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674039107147574:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:03.323896Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674039107147628:3110] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:04.055942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.089383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.180684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... [] execution duration: 25ms 2026-01-07T17:33:48.628942Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:48.630736Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:48.630767Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 20ms 2026-01-07T17:33:48.631019Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592674233022172825:2445], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:48.631177Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:48.631227Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:33:48.631289Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:48.631345Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 21ms 2026-01-07T17:33:48.631816Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:48.789918Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7592674233022172761:2436]: Pool not found 2026-01-07T17:33:48.790116Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:33:49.095070Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7592674233022172789:2441]: Pool not found 2026-01-07T17:33:49.095337Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:33:49.098650Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592674237317140182:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:49.098730Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7592674237317140183:2459], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:33:49.098736Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:49.099538Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592674237317140186:2460], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:49.099618Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:49.420124Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7592674237317140180:2457]: Pool not found 2026-01-07T17:33:49.420429Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:33:49.597776Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:35096) incoming connection opened 2026-01-07T17:33:49.597855Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:35096) -> (POST /Root, 3 bytes) 2026-01-07T17:33:49.597999Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1853:2ee9:447c:0:53:2ee9:447c:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: ff476129-277ab9ab-fcad0117-3ddb0dbe 2026-01-07T17:33:49.598215Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UnknownMethodName] requestId [ff476129-277ab9ab-fcad0117-3ddb0dbe] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2026-01-07T17:33:49.598331Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:35096) <- (400 InvalidAction, 76 bytes) 2026-01-07T17:33:49.598387Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:35096) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2026-01-07T17:33:49.598424Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:35096) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: ff476129-277ab9ab-fcad0117-3ddb0dbe Content-Type: application/x-amz-json-1.1 Content-Length: 76 2026-01-07T17:33:49.598498Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:35096) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:04.330622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:04.330696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.330724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:04.330772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:33:04.330811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:04.330840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:04.330883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:04.330939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:04.331778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:04.332015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:04.418973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:04.419048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:04.436028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:04.436452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:04.436701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:04.443491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:04.443845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:04.444654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:04.444945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:04.447732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:04.447940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:04.449194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:04.449270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:04.449389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:04.449442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:04.449487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:04.449691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:04.603447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.604585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.604717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.604813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.604882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:04.605959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... otify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-07T17:33:50.043368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-07T17:33:50.043575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:33:50.044335Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000041 2026-01-07T17:33:50.044760Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:50.044947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:50.045028Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000041, at schemeshard: 72057594046678944 2026-01-07T17:33:50.045199Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.045316Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-07T17:33:50.045367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-07T17:33:50.045434Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-07T17:33:50.045486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-07T17:33:50.045568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:50.045667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:33:50.045719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-07T17:33:50.045789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 
1/1 2026-01-07T17:33:50.045841Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-01-07T17:33:50.045888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-07T17:33:50.046048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:33:50.046141Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-07T17:33:50.046200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:33:50.046255Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-07T17:33:50.047246Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.049339Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:50.049390Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:50.049601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:33:50.049788Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:50.049829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-07T17:33:50.049874Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 40 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-07T17:33:50.051292Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.051385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.051434Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-07T17:33:50.051494Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:33:50.051555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:33:50.052053Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.052147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.052179Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-07T17:33:50.052213Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-07T17:33:50.052250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:33:50.052347Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-07T17:33:50.052411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-07T17:33:50.052899Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:33:50.052983Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:33:50.053097Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:50.054907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.055920Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-07T17:33:50.056009Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-07T17:33:50.056080Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-07T17:33:50.056141Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: 
DoExecute 2026-01-07T17:33:50.056184Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2026-01-07T17:33:50.056225Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2026-01-07T17:33:50.056490Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:33:50.058781Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2026-01-07T17:33:50.059083Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:33:50.059154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:33:50.059685Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:33:50.059803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:33:50.059857Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:958:2915] TestWaitNotification: OK eventTxId 103 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.173478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.173582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.173623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.173670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.173705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.173735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.173809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.173877Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.174693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.174931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.246805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.246850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.260155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.260485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.260644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.266810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.267165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.267942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.268194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.270774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.270957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.272070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.272142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.272264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.272307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.272349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.272494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.436235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.437925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.438027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.438106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.438175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.438251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.438353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:50.805533Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:33:50.805568Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:33:50.805602Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:33:50.805640Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:33:50.805717Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:33:50.807617Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 872 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:50.807662Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:50.807797Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 872 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:33:50.807928Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 872 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:33:50.809431Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 77309414031 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:50.809482Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:50.809592Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 77309414031 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:50.809649Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:50.809736Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 717 RawX2: 77309414031 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:33:50.809809Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:50.809850Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.809892Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:50.809939Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:33:50.810955Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:50.813759Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:33:50.815700Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.815872Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.815963Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:50.816003Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:50.816112Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:50.816153Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:50.816200Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:50.816234Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:50.816278Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is 
published: true 2026-01-07T17:33:50.816344Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:745:2723] message: TxId: 101 2026-01-07T17:33:50.816397Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:50.816444Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:50.816480Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:50.816622Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:50.821823Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:50.821865Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:746:2724] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:50.822245Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:50.822407Z node 18 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 187us result status StatusSuccess 2026-01-07T17:33:50.822960Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 
0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] Test command err: Blocked 254975 time(s) emptyPops 0 Blocked 2895 time(s) emptyPops 0 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TExportToS3Tests::AutoDropping ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2026-01-07T17:32:58.642026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674016114475948:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:58.642094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:58.710899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:59.228774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:59.228948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:59.233992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:59.242900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:59.255944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:59.256141Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7592674016114475906:2081] 1767807178639138 != 1767807178639141 2026-01-07T17:32:59.498893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:59.498924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:59.498932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:59.499025Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:59.538446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:59.653057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:59.804328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:59.811316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:00.065827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:00.078336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:00.110017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:33:00.179721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.460552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.697597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-07T17:33:00.712758Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.790842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:33:00.800175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.844543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.882423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.940639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.004473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.072188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.130610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.039141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674037589314055:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.039645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674037589314044:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.039718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.042785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674037589314059:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.042865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.046439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.068130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674037589314058:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:03.136075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674037589314111:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:03.535726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.581571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.614268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/sc ... 26-01-07T17:33:50.270010Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [280aef4b-97759704-b75f7318-9244a8d5] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "280aef4b-97759704-b75f7318-9244a8d5" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "280aef4b-97759704-b75f7318-9244a8d5" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-07T17:33:50.270703Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [DeleteMessageBatch] requestId [280aef4b-97759704-b75f7318-9244a8d5] Got succesfult GRPC response. 2026-01-07T17:33:50.270837Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [280aef4b-97759704-b75f7318-9244a8d5] reply ok 2026-01-07T17:33:50.270978Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [DeleteMessageBatch] requestId [280aef4b-97759704-b75f7318-9244a8d5] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 197 SourceAddress: 9849:2d3e:127c:0:8049:2d3e:127c:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2026-01-07T17:33:50.271226Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:43280) <- (200 , 44 bytes) 2026-01-07T17:33:50.271342Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:43280) connection closed 2026-01-07T17:33:50.272394Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:43282) incoming connection opened 2026-01-07T17:33:50.272515Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:43282) -> (POST /Root, 106 bytes) 2026-01-07T17:33:50.272665Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b8bd:4e3d:127c:0:a0bd:4e3d:127c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 7462093b-1098dabd-35c19e07-221e8cfe 2026-01-07T17:33:50.273103Z node 7 :HTTP_PROXY INFO: http_req.cpp:543: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] got new request from [b8bd:4e3d:127c:0:a0bd:4e3d:127c:0] 2026-01-07T17:33:50.273520Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:477: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2026-01-07T17:33:50.273538Z node 7 :HTTP_PROXY INFO: http_req.cpp:301: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:33:50.273674Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 7462093b-1098dabd-35c19e07-221e8cfe 2026-01-07T17:33:50.273776Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [7462093b-1098dabd-35c19e07-221e8cfe] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2026-01-07T17:33:50.273796Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [7462093b-1098dabd-35c19e07-221e8cfe] Request proxy started 2026-01-07T17:33:50.273901Z node 7 :SQS DEBUG: service.cpp:761: Request [7462093b-1098dabd-35c19e07-221e8cfe] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2026-01-07T17:33:50.274013Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [7462093b-1098dabd-35c19e07-221e8cfe] Get configuration duration: 0ms 2026-01-07T17:33:50.274122Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [7462093b-1098dabd-35c19e07-221e8cfe] Send get leader node request to sqs service for cloud4/000000000000000101v0 2026-01-07T17:33:50.274147Z node 7 :SQS DEBUG: service.cpp:581: Request [7462093b-1098dabd-35c19e07-221e8cfe] Leader node for queue [cloud4/000000000000000101v0] is 7 2026-01-07T17:33:50.274168Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [7462093b-1098dabd-35c19e07-221e8cfe] Got leader node for queue response. Node id: 7. 
Status: 0 2026-01-07T17:33:50.274275Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [7462093b-1098dabd-35c19e07-221e8cfe] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" 2026-01-07T17:33:50.274349Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [7462093b-1098dabd-35c19e07-221e8cfe] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" 2026-01-07T17:33:50.274406Z node 7 :SQS DEBUG: action.h:133: Request [7462093b-1098dabd-35c19e07-221e8cfe] Request started. Actor: [7:7592674238753706626:3939] 2026-01-07T17:33:50.275066Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7592674238753706626:3939] 2026-01-07T17:33:50.275084Z node 7 :SQS DEBUG: service.cpp:754: Request [7462093b-1098dabd-35c19e07-221e8cfe] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2026-01-07T17:33:50.275309Z node 7 :SQS DEBUG: action.h:627: Request [7462093b-1098dabd-35c19e07-221e8cfe] Get configuration duration: 1ms 2026-01-07T17:33:50.275317Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-07T17:33:50.275326Z node 7 :SQS TRACE: action.h:647: Request [7462093b-1098dabd-35c19e07-221e8cfe] Got configuration. Root url: http://ghrun-z2c6jslqle.auto.internal:8771, Shards: 4, Fail: 0 2026-01-07T17:33:50.275337Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 6ms 2026-01-07T17:33:50.275352Z node 7 :SQS TRACE: action.h:662: Request [7462093b-1098dabd-35c19e07-221e8cfe] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2026-01-07T17:33:50.275366Z node 7 :SQS TRACE: action.h:427: Request [7462093b-1098dabd-35c19e07-221e8cfe] DoRoutine 2026-01-07T17:33:50.275410Z node 7 :SQS TRACE: queue_leader.cpp:2426: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2026-01-07T17:33:50.275425Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [7462093b-1098dabd-35c19e07-221e8cfe] Received empty result from shard 3 infly. Infly capacity: 0. Messages count: 0 2026-01-07T17:33:50.275439Z node 7 :SQS DEBUG: queue_leader.cpp:1164: Request [7462093b-1098dabd-35c19e07-221e8cfe] No known messages in this shard. 
Skip attempt to add messages to infly 2026-01-07T17:33:50.275447Z node 7 :SQS DEBUG: queue_leader.cpp:1170: Request [7462093b-1098dabd-35c19e07-221e8cfe] Already tried to add messages to infly 2026-01-07T17:33:50.275483Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/3]. ActiveMessageRequests: 0 2026-01-07T17:33:50.275496Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-07T17:33:50.275525Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2026-01-07T17:33:50.275560Z node 7 :SQS TRACE: action.h:264: Request [7462093b-1098dabd-35c19e07-221e8cfe] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } } 2026-01-07T17:33:50.275599Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 7ms 2026-01-07T17:33:50.275626Z node 7 :SQS TRACE: proxy_service.h:35: Request [7462093b-1098dabd-35c19e07-221e8cfe] Sending sqs response: { ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-07T17:33:50.275722Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 1ms 2026-01-07T17:33:50.275734Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-07T17:33:50.275766Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2026-01-07T17:33:50.275784Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7592674238753706625:2544]: ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-07T17:33:50.275834Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7592674238753706626:3939]. 
Found: 1 2026-01-07T17:33:50.275861Z node 7 :SQS DEBUG: queue_leader.cpp:1915: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/2] 2026-01-07T17:33:50.275924Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [7462093b-1098dabd-35c19e07-221e8cfe] HandleResponse: { ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2026-01-07T17:33:50.275993Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [7462093b-1098dabd-35c19e07-221e8cfe] Sending reply from proxy actor: { ReceiveMessage { RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" } RequestId: "7462093b-1098dabd-35c19e07-221e8cfe" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-07T17:33:50.276564Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] Got succesfult GRPC response. 2026-01-07T17:33:50.276615Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] reply ok 2026-01-07T17:33:50.276715Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ReceiveMessage] requestId [7462093b-1098dabd-35c19e07-221e8cfe] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 154 SourceAddress: b8bd:4e3d:127c:0:a0bd:4e3d:127c:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2026-01-07T17:33:50.276795Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:43282) <- (200 , 2 bytes) Http output full {} 2026-01-07T17:33:50.276889Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:43282) connection closed |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> TExportToS3Tests::CorruptedDecimalValue [GOOD] >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> TAuditTest::OtherGetRequestsAreAudited [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink >> TKeyValueTracingTest::ReadHuge |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> TKeyValueTracingTest::WriteSmall >> TestKinesisHttpProxy::GoodRequestCreateStream |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TKeyValueTracingTest::WriteHuge >> 
TExportToS3Tests::AutoDropping [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:09.835494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:09.835575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:09.835605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:09.835632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:09.835664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:09.835700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:09.835773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:09.835871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:09.836749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:09.837087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:09.926972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console 
configs 2026-01-07T17:33:09.927021Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:09.939483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:09.939857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:09.940064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:09.945894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:09.946158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:09.946713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:09.946936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:09.948984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:09.949173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:09.950334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:09.950436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:09.950569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:09.950622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:09.950669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:09.950879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:10.092617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.093660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.093788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.093888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.093958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:10.094810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
y msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2026-01-07T17:33:53.075082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000041 2026-01-07T17:33:53.076227Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:53.076344Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:53.076401Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000041, at schemeshard: 72057594046678944 2026-01-07T17:33:53.076546Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 128 -> 129 2026-01-07T17:33:53.076711Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000041 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:30175 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CAE7CFC2-7393-4CEF-8C6A-9EE74D8160E3 amz-sdk-request: attempt=1 content-length: 119 content-md5: oX2vUQoa+8sc65ouCpRQqg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 119 2026-01-07T17:33:53.125359Z node 7 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation 2026-01-07T17:33:53.125894Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:53.125944Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:33:53.126211Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:53.126250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 40 FAKE_COORDINATOR: Erasing txId 281474976710759 2026-01-07T17:33:53.137533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-07T17:33:53.137635Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:33:53.138618Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-07T17:33:53.138723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-07T17:33:53.138759Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2026-01-07T17:33:53.138799Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:33:53.138840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-07T17:33:53.138938Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2026-01-07T17:33:53.147627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2026-01-07T17:33:53.160545Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 858 RawX2: 30064773896 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-07T17:33:53.160626Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:53.160773Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 858 RawX2: 30064773896 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-07T17:33:53.160916Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 858 RawX2: 30064773896 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-07T17:33:53.161033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, 
operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:53.161081Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-07T17:33:53.161129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:53.161185Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 129 -> 240 2026-01-07T17:33:53.161400Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:53.164133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-07T17:33:53.164494Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-07T17:33:53.164548Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2026-01-07T17:33:53.164714Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-07T17:33:53.164751Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-07T17:33:53.164796Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-07T17:33:53.164828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-07T17:33:53.164865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2026-01-07T17:33:53.164957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:130:2154] message: TxId: 281474976710759 2026-01-07T17:33:53.165014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-07T17:33:53.165062Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:0 2026-01-07T17:33:53.165097Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:0 2026-01-07T17:33:53.165225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:33:53.167741Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2026-01-07T17:33:53.167823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710759 2026-01-07T17:33:53.168053Z node 7 :EXPORT NOTICE: schemeshard_export__create.cpp:729: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2026-01-07T17:33:53.170242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:53.170310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:888:2846] TestWaitNotification: OK eventTxId 102 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:30:29.835807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:30:29.835902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:29.835946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:30:29.836000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:30:29.836043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:30:29.836070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:30:29.836148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:30:29.842244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:30:29.843384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:30:29.843804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:30:29.971963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:30:29.972018Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:29.994493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:30:29.995015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:30:29.995202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:30:30.025272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:30:30.025548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:30:30.026235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:30:30.027152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:30:30.035019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:30.035237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:30:30.036612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:30:30.036688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:30:30.036831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:30:30.036881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:30:30.036939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:30:30.037109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:30:30.320271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.321568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.321716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.321824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.321932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:30:30.322784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
0 TxId: 281474976725763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2026-01-07T17:33:53.461316Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-07T17:33:53.461458Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1767:3616], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-07T17:33:53.461982Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976725763:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725763 msg type: 269090816 2026-01-07T17:33:53.462102Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976725763, partId: 4294967295, tablet: 72075186233409550 2026-01-07T17:33:53.462267Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-07T17:33:53.462304Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 0/1, is published: true 2026-01-07T17:33:53.462337Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-07T17:33:53.476200Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 20750, transactions count in step: 1, at schemeshard: 72075186233409549 2026-01-07T17:33:53.476353Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725763 AckTo { RawX1: 0 RawX2: 0 } } Step: 20750 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:33:53.476428Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409549] TDropLock TPropose opId# 281474976725763:0 HandleReply TEvOperationPlan: step# 20750 2026-01-07T17:33:53.476496Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976725763:0 128 -> 240 2026-01-07T17:33:53.480265Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976725763:0, at schemeshard: 72075186233409549 2026-01-07T17:33:53.480332Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725763:0 ProgressState 2026-01-07T17:33:53.480417Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-07T17:33:53.480446Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-07T17:33:53.480483Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-07T17:33:53.480512Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-07T17:33:53.480568Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 1/1, is published: true 2026-01-07T17:33:53.480643Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1007:2947] message: TxId: 281474976725763 2026-01-07T17:33:53.480692Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-07T17:33:53.480740Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725763:0 2026-01-07T17:33:53.480771Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725763:0 2026-01-07T17:33:53.480854Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 6 2026-01-07T17:33:53.485077Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725763 2026-01-07T17:33:53.485165Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725763 2026-01-07T17:33:53.485238Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 109, txId# 281474976725763 2026-01-07T17:33:53.485372Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1767:3616], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: 
StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000}, txId# 281474976725763 2026-01-07T17:33:53.488064Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-07T17:33:53.488205Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1767:3616], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-07T17:33:53.488294Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:33:53.490310Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done 2026-01-07T17:33:53.490445Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1767:3616], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-07T17:33:53.490509Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 109, 
subscribers count# 1 2026-01-07T17:33:53.490730Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-07T17:33:53.490784Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [5:3235:4977] TestWaitNotification: OK eventTxId 109 2026-01-07T17:33:53.492212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-07T17:33:53.492327Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo quantity: 311, 311 unit: "request_unit", "request_unit" type: "delta", "delta" ... unblocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering 2026-01-07T17:33:53.495481Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:338: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson 2026-01-07T17:33:53.496021Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-07T17:33:53.496115Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:37.336626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:37.336725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:37.336778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:37.336823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:37.336866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:37.336921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:37.337005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:37.337081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:33:37.338005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:37.338331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:37.425927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:37.425984Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:37.446063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:37.446451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:37.446636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:37.455108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:37.455500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:37.456284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:37.456564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:37.460241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:37.460465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:37.461754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:37.461834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:37.461955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:37.462004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:37.462056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:37.462236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:37.602263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:37.603979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
e publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:33:53.592953Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.593087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.593116Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-07T17:33:53.593143Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 15 2026-01-07T17:33:53.593174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-07T17:33:53.593252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-07T17:33:53.595448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-07T17:33:53.595500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-07T17:33:53.595537Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-07T17:33:53.597496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2026-01-07T17:33:53.597598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:33:53.597773Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000048 FAKE_COORDINATOR: advance: minStep5000048 State->FrontStep: 5000047 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000048 2026-01-07T17:33:53.597942Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000048, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:53.598019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000048 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:53.598053Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710765:0, step: 5000048, at schemeshard: 72057594046678944 2026-01-07T17:33:53.598195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710765:0, at schemeshard: 72057594046678944 2026-01-07T17:33:53.598273Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-07T17:33:53.598326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-07T17:33:53.598393Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-07T17:33:53.598432Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-07T17:33:53.598496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:53.598582Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-07T17:33:53.598614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: false 2026-01-07T17:33:53.598658Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-07T17:33:53.598692Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710765:0 2026-01-07T17:33:53.598724Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710765:0 2026-01-07T17:33:53.598782Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-07T17:33:53.598818Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710765, publications: 2, subscribers: 1 2026-01-07T17:33:53.598860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 1], 18 2026-01-07T17:33:53.599138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 44], 18446744073709551615 2026-01-07T17:33:53.600843Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.602401Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:53.602434Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:53.602557Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 44] 2026-01-07T17:33:53.602717Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:53.602752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 1 2026-01-07T17:33:53.602780Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 44 FAKE_COORDINATOR: Erasing txId 281474976710765 2026-01-07T17:33:53.603431Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.603523Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.603564Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-07T17:33:53.603618Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-07T17:33:53.603672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:33:53.604154Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.604270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.604313Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-07T17:33:53.604353Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-07T17:33:53.604382Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-07T17:33:53.604454Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, 
notify & remove, at schemeshard: 72057594046678944, txId: 281474976710765, subscribers: 1 2026-01-07T17:33:53.604519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-07T17:33:53.607551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.608423Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-07T17:33:53.608513Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2026-01-07T17:33:53.608574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710765 2026-01-07T17:33:53.610445Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:33:53.610507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:1230:3121] TestWaitNotification: OK eventTxId 103 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TestYmqHttpProxy::TestListQueueTags >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2026-01-07T17:32:59.202099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674020903614988:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:59.202134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:59.696802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:59.697071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:59.705560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:59.877034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:59.989308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:00.029779Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:00.203478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:00.203498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-01-07T17:33:00.203504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:00.203578Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:00.233188Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:00.528830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:00.595037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:00.984369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:00.989539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:33:00.994761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:01.011522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:33:01.083105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.220365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.274764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.341348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.389031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.430114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.472868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.519257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.575160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.656991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.309298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674038083485781:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.309403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.309866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674038083485793:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.309912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674038083485794:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.310030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.314387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.329235Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674038083485797:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:03.426264Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674038083485848:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:03.886914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.942452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.026727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.073210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.106218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... ba3a811] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2026-01-07T17:33:53.645289Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [79c6fe11-a525bf48-c38da1-aba3a811] Request proxy started 2026-01-07T17:33:53.645359Z node 7 :SQS DEBUG: service.cpp:761: Request [79c6fe11-a525bf48-c38da1-aba3a811] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2026-01-07T17:33:53.645429Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [79c6fe11-a525bf48-c38da1-aba3a811] Get configuration duration: 0ms 2026-01-07T17:33:53.645567Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [79c6fe11-a525bf48-c38da1-aba3a811] Send get leader node request to sqs service for cloud4/000000000000000301v0 2026-01-07T17:33:53.645599Z node 7 :SQS DEBUG: service.cpp:581: Request [79c6fe11-a525bf48-c38da1-aba3a811] Leader node for queue [cloud4/000000000000000301v0] is 7 2026-01-07T17:33:53.645621Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [79c6fe11-a525bf48-c38da1-aba3a811] Got leader node for queue response. Node id: 7. 
Status: 0 2026-01-07T17:33:53.645766Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [79c6fe11-a525bf48-c38da1-aba3a811] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" 2026-01-07T17:33:53.645865Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [79c6fe11-a525bf48-c38da1-aba3a811] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" 2026-01-07T17:33:53.645934Z node 7 :SQS DEBUG: action.h:133: Request [79c6fe11-a525bf48-c38da1-aba3a811] Request started. Actor: [7:7592674254727578414:5603] 2026-01-07T17:33:53.645972Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7592674254727578414:5603] 2026-01-07T17:33:53.645993Z node 7 :SQS DEBUG: service.cpp:754: Request [79c6fe11-a525bf48-c38da1-aba3a811] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2026-01-07T17:33:53.651330Z node 7 :SQS TRACE: executor.cpp:286: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715923 Step: 1767807233696 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: 
true } } Struct { Optional { Text: "{\"k3\":\"v\"}" } } } } } 2026-01-07T17:33:53.651376Z node 7 :SQS DEBUG: executor.cpp:287: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 12ms 2026-01-07T17:33:53.651878Z node 7 :SQS TRACE: executor.cpp:325: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715923 Step: 1767807233696 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k3\":\"v\"}" } } } } } 2026-01-07T17:33:53.652012Z node 7 :SQS TRACE: executor.cpp:327: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{\"k3\":\"v\"}"} 2026-01-07T17:33:53.652195Z node 7 :SQS DEBUG: executor.cpp:401: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution 
duration: 14ms 2026-01-07T17:33:53.652465Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [ab64e8e-815e225e-46a0b36a-ff04569a] Sending executed reply 2026-01-07T17:33:53.652865Z node 7 :SQS DEBUG: action.h:627: Request [79c6fe11-a525bf48-c38da1-aba3a811] Get configuration duration: 7ms 2026-01-07T17:33:53.652888Z node 7 :SQS TRACE: action.h:647: Request [79c6fe11-a525bf48-c38da1-aba3a811] Got configuration. Root url: http://ghrun-z2c6jslqle.auto.internal:8771, Shards: 1, Fail: 0 2026-01-07T17:33:53.652914Z node 7 :SQS TRACE: action.h:427: Request [79c6fe11-a525bf48-c38da1-aba3a811] DoRoutine 2026-01-07T17:33:53.653062Z node 7 :SQS TRACE: action.h:264: Request [79c6fe11-a525bf48-c38da1-aba3a811] SendReplyAndDie from action actor { ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } } 2026-01-07T17:33:53.653196Z node 7 :SQS TRACE: proxy_service.h:35: Request [79c6fe11-a525bf48-c38da1-aba3a811] Sending sqs response: { ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k3" Value: "v" } } 2026-01-07T17:33:53.653324Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7592674254727578414:5603]. Found: 1 2026-01-07T17:33:53.653430Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k3" Value: "v" } 2026-01-07T17:33:53.653520Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7592674254727578413:2768]: ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k3" Value: "v" } 2026-01-07T17:33:53.653690Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [79c6fe11-a525bf48-c38da1-aba3a811] HandleResponse: { ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k3" Value: "v" } }, status: OK 2026-01-07T17:33:53.653774Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [79c6fe11-a525bf48-c38da1-aba3a811] Sending reply from proxy actor: { ListQueueTags { RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" Tags { Key: "k3" Value: "v" } } RequestId: "79c6fe11-a525bf48-c38da1-aba3a811" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k3" Value: "v" } } 2026-01-07T17:33:53.653992Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [79c6fe11-a525bf48-c38da1-aba3a811] Got succesfult GRPC response. 2026-01-07T17:33:53.654115Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [79c6fe11-a525bf48-c38da1-aba3a811] reply ok 2026-01-07T17:33:53.654244Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [79c6fe11-a525bf48-c38da1-aba3a811] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 170 SourceAddress: b8b8:8f14:ae7b:0:a0b8:8f14:ae7b:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-01-07T17:33:53.654345Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:34048) <- (200 , 19 bytes) 2026-01-07T17:33:53.654466Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:34048) connection closed Http output full {"Tags":{"k3":"v"}} |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] >> TUrlMatcherTest::MatchRecursive [GOOD] >> TKeyValueTracingTest::WriteSmall [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:32:58.841710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:32:58.841797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:58.841837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:32:58.841869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:32:58.841915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:32:58.841964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:32:58.842031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:32:58.842104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:32:58.843047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:32:58.843342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:32:58.927332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:32:58.927413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:58.941865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:32:58.942121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:32:58.942250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:32:58.946722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:32:58.947067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:32:58.947606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:32:58.947786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:32:58.949918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:58.950057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:32:58.950898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:32:58.950954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:32:58.951040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:32:58.951073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:32:58.951128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:32:58.951273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:32:59.273414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.279636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.279832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.279976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:32:59.280441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:32:59.280816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
de 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:54.813932Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:54.813964Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-07T17:33:54.813996Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-07T17:33:54.815525Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:54.815604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:54.815652Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:885:2843] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:54.816709Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:54.816945Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 257us result status StatusSuccess 2026-01-07T17:33:54.817560Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 2 2026-01-07T17:33:54.818214Z node 7 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-07T17:33:54.820331Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:54.820385Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 102 2026-01-07T17:33:54.820464Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-07T17:33:54.820553Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:54.820663Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2026-01-07T17:33:54.820744Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:54.820793Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2026-01-07T17:33:54.820873Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-07T17:33:54.821006Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:54.823915Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-07T17:33:54.824135Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.824324Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, at schemeshard: 
72057594046678944 2026-01-07T17:33:54.829895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:54.830164Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2026-01-07T17:33:54.830313Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-07T17:33:54.830419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-07T17:33:54.830488Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:54.830530Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-07T17:33:54.830594Z node 7 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-07T17:33:54.830701Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-07T17:33:54.832344Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-07T17:33:54.832461Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:54.832547Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:54.832606Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:54.832655Z node 7 :EXPORT DEBUG: 
schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-07T17:33:54.832694Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-07T17:33:54.832730Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-07T17:33:54.834280Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2026-01-07T17:33:54.834482Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:33:54.834526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:33:54.834920Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:33:54.835027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:54.835065Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:1106:3059] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.044595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.044696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.044747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.044787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.044824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.044869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.044974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.045041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:33:26.045876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.046131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.130156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.130203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.143625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.143862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.143987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.149474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.149784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.150414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.150634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.153178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.153375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.154502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.154579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.154690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.154730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.154769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.154919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.304634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.305646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.305767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.305857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.305942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.306777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-07T17:33:54.370315Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.370420Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.370896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.371134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.373557Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.373669Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.373761Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.373927Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.374187Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.374555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.374714Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.374914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375061Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375252Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:33:54.375600Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:33:54.375735Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:54.375785Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:54.375836Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:33:54.375870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:54.375914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:33:54.375988Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:3113:4331] message: TxId: 101 2026-01-07T17:33:54.376049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:33:54.376131Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:33:54.376170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:33:54.377746Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-07T17:33:54.381148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:33:54.381212Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:3114:4332] TestWaitNotification: OK eventTxId 101 2026-01-07T17:33:54.381775Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:54.382075Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 335us result status StatusSuccess 2026-01-07T17:33:54.382756Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 
72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> TKeyValueTracingTest::ReadSmall [GOOD] >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-01-07T17:33:27.526121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:27.526193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:27.526236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:27.526268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:27.526298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:27.526322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-07T17:33:27.526382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:27.526446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:27.527224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:27.527546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:27.596619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:27.596655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:27.608491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:27.608729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:27.608869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:27.620797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:27.621100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:27.621921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:27.622349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:27.625704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:27.625902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:27.627094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:27.627162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:27.627296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:27.627368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:27.627418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:27.627595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.779372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.780983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.781048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.781146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.781217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.781315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.781378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
tifyTxCompletionResult 2026-01-07T17:33:55.896056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1747:3647] 2026-01-07T17:33:55.896125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-07T17:33:56.030550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-07T17:33:56.031199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-07T17:33:56.031342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-07T17:33:56.031409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.031510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-07T17:33:56.032202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-07T17:33:56.032250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.032306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-07T17:33:56.165698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-07T17:33:56.165781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:56.165880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: 
at schemeshard: 72057594046678944 2026-01-07T17:33:56.166023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409549, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1767820498622395 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-07T17:33:56.166157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409548, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1767820498622395 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-07T17:33:56.166686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:33:56.167203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:56.168170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:56.168225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:33:56.168494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-07T17:33:56.168527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:33:56.175798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:56.175972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:56.176027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-07T22:14:58.622395Z, at schemeshard: 72057594046678944 2026-01-07T17:33:56.176098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-07T17:33:56.176152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:56.176253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:33:56.176297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-07T22:14:58.622395Z, at schemeshard: 72057594046678944 2026-01-07T17:33:56.176338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:33:56.200296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at 
tablet 72057594046678944, queue size# 0 2026-01-07T17:33:56.265541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-07T17:33:56.265726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-07T17:33:56.265811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-07T17:33:56.265878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.265992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-07T17:33:56.266174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-07T17:33:56.266217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.266275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-07T17:33:56.298515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:33:56.356285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-07T17:33:56.356451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-07T17:33:56.356518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-07T17:33:56.356580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 
followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.356697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-07T17:33:56.356942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-07T17:33:56.356996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:33:56.357053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2026-01-07T17:33:01.663707Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674031447451770:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:01.663779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:02.133017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:02.161463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:02.161576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:02.266042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:02.375995Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:02.378663Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674031447451732:2081] 1767807181657793 != 1767807181657796 2026-01-07T17:33:02.401068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:02.565391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:02.565414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:02.565426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:02.565530Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:02.685540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:02.867042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:02.875277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:03.165474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:03.175554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:33:03.177664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:03.208080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:33:03.297838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.591703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.674582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.747596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:33:03.752613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.796839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.832511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.873962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.921845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.972721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.008911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:05.921491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674048627322573:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.921568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674048627322585:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.921587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.922008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674048627322587:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.922074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.926381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:05.940900Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674048627322588:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:06.033627Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674052922289936:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:06.493883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:06.593506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:06.664164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674031447451770:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:06.664263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:06.677485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pr ... Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2026-01-07T17:33:55.502406Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request SendMessageBatch working duration: 86ms 2026-01-07T17:33:55.502561Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse SendMessageBatch { RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "cd71fdae-5e9e3ef9-f3a3a444-54c7644a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "d68d248c-5314d996-bb92131b-f8be3914" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-07T17:33:55.502699Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7592674263904241135:2498]: SendMessageBatch { RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "cd71fdae-5e9e3ef9-f3a3a444-54c7644a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "d68d248c-5314d996-bb92131b-f8be3914" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-07T17:33:55.502768Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7592674263904241141:3742]. Found: 1 2026-01-07T17:33:55.503423Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [340ce3fb-6dbf411c-e4feed9f-443553f7] HandleResponse: { SendMessageBatch { RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "cd71fdae-5e9e3ef9-f3a3a444-54c7644a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "d68d248c-5314d996-bb92131b-f8be3914" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2026-01-07T17:33:55.503612Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [340ce3fb-6dbf411c-e4feed9f-443553f7] Sending reply from proxy actor: { SendMessageBatch { RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "cd71fdae-5e9e3ef9-f3a3a444-54c7644a" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "d68d248c-5314d996-bb92131b-f8be3914" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "340ce3fb-6dbf411c-e4feed9f-443553f7" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"cd71fdae-5e9e3ef9-f3a3a444-54c7644a"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"d68d248c-5314d996-bb92131b-f8be3914"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2026-01-07T17:33:55.503988Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [SendMessageBatch] requestId [340ce3fb-6dbf411c-e4feed9f-443553f7] Got succesfult GRPC response. 2026-01-07T17:33:55.504229Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessageBatch] requestId [340ce3fb-6dbf411c-e4feed9f-443553f7] reply ok 2026-01-07T17:33:55.504388Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [SendMessageBatch] requestId [340ce3fb-6dbf411c-e4feed9f-443553f7] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 619 SourceAddress: 38ad:102:ae7b:0:20ad:102:ae7b:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2026-01-07T17:33:55.504574Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:37890) <- (200 , 465 bytes) 2026-01-07T17:33:55.504695Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:37890) connection closed 2026-01-07T17:33:55.507654Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2026-01-07T17:33:55.507700Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 6ms 2026-01-07T17:33:55.507876Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2026-01-07T17:33:55.507889Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2026-01-07T17:33:55.507976Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2026-01-07T17:33:55.508049Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2026-01-07T17:33:55.508395Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2026-01-07T17:32:58.525748Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674019408551287:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:58.526115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:58.553150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:58.891660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:58.896951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:58.897057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:58.961601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:59.035569Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:59.107313Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:59.107343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:59.107351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:59.107438Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:59.122346Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:59.400593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:59.421271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:59.526207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:59.738328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:59.746072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:32:59.758023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:59.793855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:32:59.888430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.209367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.306816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 
2026-01-07T17:33:00.317400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.432022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:33:00.437329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.516279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.631778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.743117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.811583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.861723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.927841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.050628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040883389283:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.050744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.051944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040883389294:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.052068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040883389296:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.052123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.065603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.080883Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674040883389299:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:03.186903Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674040883389350:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:03.500278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674019408551287:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:03.500347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:03.839120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.877668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: ( ... em { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:55.187748Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:33:55.187886Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 26ms 2026-01-07T17:33:55.188583Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 
ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:55.188811Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:55.190537Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:55.190564Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 26ms 2026-01-07T17:33:55.190818Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:55.190855Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:33:55.190960Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 26ms 2026-01-07T17:33:55.191296Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:55.361600Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674260934444600:2440]: Pool not found 
2026-01-07T17:33:55.361790Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:33:55.778576Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674260934444588:2435]: Pool not found 2026-01-07T17:33:55.778970Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:33:55.782773Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7592674260934444714:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:55.782794Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7592674260934444715:2459], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:33:55.782864Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:55.783337Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7592674260934444718:2460], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:55.783383Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:56.063553Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674260934444712:2457]: Pool not found 2026-01-07T17:33:56.063855Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:33:56.157274Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:41166) incoming connection opened 2026-01-07T17:33:56.157363Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:41166) -> (POST /, 87 bytes) 2026-01-07T17:33:56.157567Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [5809:78c2:187c:0:4009:78c2:187c:0] request [CreateStream] url [/] database [] requestId: 10b67456-d990e324-453da2f-bf9fcbd 2026-01-07T17:33:56.158251Z node 8 :HTTP_PROXY WARN: http_req.cpp:971: http request [CreateStream] requestId [10b67456-d990e324-453da2f-bf9fcbd] got new request with incorrect json from [5809:78c2:187c:0:4009:78c2:187c:0] database '' 2026-01-07T17:33:56.158471Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [10b67456-d990e324-453da2f-bf9fcbd] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2026-01-07T17:33:56.158819Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:41166) <- (400 InvalidArgumentException, 135 bytes) 2026-01-07T17:33:56.158892Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:41166) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2026-01-07T17:33:56.158934Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:41166) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 10b67456-d990e324-453da2f-bf9fcbd Content-Type: application/x-amz-json-1.1 Content-Length: 135 2026-01-07T17:33:56.159040Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:41166) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |85.0%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:00.617061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:00.617156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:00.617234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:00.617289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:00.617333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:00.617372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:00.617439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:00.617538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:00.618459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:00.618791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:00.807288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:00.807352Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:00.830197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:00.830569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:00.830750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:00.842086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:00.842464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:00.843279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:00.843553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:00.846474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:00.846694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:00.847977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:00.848043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:00.848153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:00.848237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:00.848295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:00.848497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:01.049502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.050719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.050959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.051907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.052009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:01.052074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
move publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:56.723179Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.723286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.723321Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:56.723354Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-07T17:33:56.723390Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:56.723475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-07T17:33:56.725701Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:56.725746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-07T17:33:56.725783Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-07T17:33:56.727312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-07T17:33:56.727439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:33:56.727785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-01-07T17:33:56.728057Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:56.728183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:33:56.728241Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-07T17:33:56.728388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-07T17:33:56.728480Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-07T17:33:56.728532Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:56.728597Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-07T17:33:56.728647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:56.728729Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:33:56.728817Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:33:56.728866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-07T17:33:56.728946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-07T17:33:56.728994Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-07T17:33:56.729044Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-07T17:33:56.729138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:56.729186Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-07T17:33:56.729244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:33:56.729299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-07T17:33:56.730755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-07T17:33:56.735117Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:56.735189Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-07T17:33:56.735379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:33:56.735515Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:56.735555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-07T17:33:56.735598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 2026-01-07T17:33:56.736532Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.736636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.736681Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:56.736747Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:33:56.736803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:33:56.737471Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.737556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.737587Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-07T17:33:56.737618Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:33:56.737651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:33:56.737723Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, 
at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-07T17:33:56.737787Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-07T17:33:56.741309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.741606Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-07T17:33:56.741710Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-07T17:33:56.741777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-07T17:33:56.743642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:56.743718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:1000:2952] TestWaitNotification: OK eventTxId 102 >> TReplicationTests::CopyReplicatedTable [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpScanArrowFormat::JoinWithParams [GOOD] >> Secret::Simple >> KqpScanArrowInChanels::AggregateCountStar |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpResultSetFormats::ArrowFormat_LargeTable |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:42.720660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:42.720735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:42.720769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:42.720803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:42.721556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:42.721603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:42.721690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:42.723046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:42.724998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:42.726074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:42.853222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:42.853291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:42.875215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:42.875563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:42.876783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:42.899393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:42.899689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:42.900283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-07T17:33:42.906050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:42.916158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:42.927465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:42.937347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:42.937433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:42.937559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:42.937634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:42.937731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:42.938629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:43.105819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.107723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.107846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.107937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 815 RawX2: 42949675742 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.474602Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:33:59.474803Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 815 RawX2: 42949675742 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.474896Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:59.475066Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 815 RawX2: 42949675742 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.475178Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:59.475253Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-07T17:33:59.486107Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.486762Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.499254Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 42949675657 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.499312Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:33:59.499418Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 42949675657 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.499477Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:33:59.499542Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 711 RawX2: 42949675657 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:33:59.499596Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:59.499645Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.499693Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:33:59.499736Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:33:59.499765Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:33:59.502228Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.502849Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.502954Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-07T17:33:59.503067Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-07T17:33:59.503138Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-07T17:33:59.503283Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-07T17:33:59.503351Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-07T17:33:59.506105Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:33:59.506180Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:33:59.506387Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:59.506465Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:59.506518Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:33:59.506570Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:59.506628Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:33:59.506740Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [10:739:2717] message: TxId: 102 2026-01-07T17:33:59.506831Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:33:59.506916Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:33:59.506991Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:33:59.507216Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:33:59.507282Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:33:59.513566Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:33:59.513687Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:842:2801] TestWaitNotification: OK eventTxId 102 2026-01-07T17:33:59.514616Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:33:59.514976Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 
describe path "/MyRoot/CopyTable" took 388us result status StatusSuccess 2026-01-07T17:33:59.515575Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> Secret::ValidationQueryService |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2026-01-07T17:32:58.548110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674017826996001:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:58.548152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:59.005181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:59.163514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:59.163634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:59.167725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:59.324902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:59.353052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:59.575065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:59.596949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:59.596969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:59.596976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:59.597051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:59.971949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:59.984360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:00.328205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:00.365606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:33:00.367744Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:00.633316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.893010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:00.969902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-07T17:33:00.974834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.031170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.082072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.125461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.190413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.233507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.300159Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.359476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:02.987250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674035006866737:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:02.987449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:02.988106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674035006866749:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:02.988171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674035006866750:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:02.988222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:02.993142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.012251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674035006866753:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:03.104210Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674039301834100:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:03.549069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674017826996001:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:03.549129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:03.619388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.663607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.709278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.748464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is u ... 
node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:39790) connection closed 2026-01-07T17:33:57.986216Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7592674271439527166:2491] destroyed 2026-01-07T17:33:57.986263Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7592674271439527167:2492] destroyed 2026-01-07T17:33:58.010657Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:33:58.010698Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.010712Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.010735Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.010749Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-07T17:33:58.010838Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:33:58.010850Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.010863Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.010882Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.010895Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-07T17:33:58.017179Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:33:58.017226Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.017242Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.017264Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.017281Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-07T17:33:58.024909Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:33:58.024963Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.024980Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.025005Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.025027Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-07T17:33:58.025098Z node 8 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:33:58.025112Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.025125Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.025141Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.025152Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-07T17:33:58.110900Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:33:58.110930Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.110940Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.110957Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.110968Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-07T17:33:58.111025Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:33:58.111042Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.111049Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.111058Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.111063Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-07T17:33:58.119329Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:33:58.119373Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.119387Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.119408Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.119422Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-07T17:33:58.125556Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:33:58.125595Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.125595Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:33:58.125608Z node 8 :PERSQUEUE 
DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.125616Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.125628Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.125639Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.125646Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.125656Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-07T17:33:58.125658Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-07T17:33:58.213058Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:33:58.213100Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.213113Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.213134Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.213149Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-07T17:33:58.213211Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:33:58.213222Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.213234Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.213245Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.213253Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-07T17:33:58.229056Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:33:58.229093Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229111Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.229134Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229150Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-07T17:33:58.229220Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:33:58.229232Z node 8 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229242Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.229255Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229263Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-07T17:33:58.229289Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:33:58.229302Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229312Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:33:58.229326Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:33:58.229335Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2026-01-07T17:33:01.689698Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674031953143323:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:01.689850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:02.052111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:02.052195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:02.163536Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674031953143163:2081] 1767807181657130 != 1767807181657133 2026-01-07T17:33:02.163631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:02.180446Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:02.185577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:02.279295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:02.279318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:02.279325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:02.279455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:02.420248Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:02.487146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:02.495492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:33:02.696611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:02.721752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:02.734537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:02.859775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:02.965377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.010793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-01-07T17:33:03.015607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.052795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-07T17:33:03.057627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.140870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.190395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.276516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.347181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.424267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.496166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:05.177156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674049133014001:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.177288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.177743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674049133014013:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.177751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674049133014014:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.177818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:05.181830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:05.195983Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674049133014017:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:33:05.271528Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674049133014068:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:05.674961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:05.724001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:05.761717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:05.810116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 
Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:56.401187Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:33:56.401259Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 8ms 2026-01-07T17:33:56.401493Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:56.402156Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:56.402181Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue 
[] Attempt 1 execution duration: 6ms 2026-01-07T17:33:56.402545Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:56.402575Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:33:56.402664Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 7ms 2026-01-07T17:33:56.403032Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { 
Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:33:56.564816Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674265949779471:2441]: Pool not found 2026-01-07T17:33:56.565025Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:33:56.906124Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674265949779425:2436]: Pool not found 2026-01-07T17:33:56.906601Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:33:56.910603Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7592674265949779544:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:56.910661Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7592674265949779545:2459], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:33:56.910709Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:56.911193Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7592674265949779548:2460], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:56.911260Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:57.288064Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7592674265949779542:2457]: Pool not found 2026-01-07T17:33:57.288360Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2026-01-07T17:33:57.392980Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:51538) incoming connection opened 2026-01-07T17:33:57.393118Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:51538) -> (POST /Root, 4 bytes) 2026-01-07T17:33:57.393302Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [785d:b5b9:a97b:0:605d:b5b9:a97b:0] request [CreateStream] url [/Root] database [/Root] requestId: 29d26a4f-882ef364-ad38e154-2191d32d 2026-01-07T17:33:57.393976Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [29d26a4f-882ef364-ad38e154-2191d32d] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2026-01-07T17:33:57.394282Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:51538) <- (400 MissingParameter, 127 bytes) 2026-01-07T17:33:57.394334Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:51538) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2026-01-07T17:33:57.394370Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:51538) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 29d26a4f-882ef364-ad38e154-2191d32d Content-Type: application/x-amz-json-1.1 Content-Length: 127 2026-01-07T17:33:57.394484Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:51538) connection closed |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |84.9%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |85.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct |85.0%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.0%| [TA] {RESULT} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2026-01-07T17:33:02.282872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674034781955406:2231];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:02.282977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:02.750837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:02.750938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:02.818742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:03.165031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674034781955213:2081] 1767807182244323 != 1767807182244326 2026-01-07T17:33:03.165218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:03.177606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:03.177635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:03.177649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:03.177763Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:03.192577Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:03.285172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:03.437080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:03.606039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:03.612364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:04.007180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:04.030137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:33:04.034701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:04.065813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:33:04.128154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.278055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.350356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.401973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.442311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.516579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.561067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.622596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.695044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.771315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:06.585658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051961826065:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.585779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.585922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051961826077:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.586377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051961826079:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.586464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.589683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:06.610832Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674051961826080:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:06.677151Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674051961826132:3111] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:07.112668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.150356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.186748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.227405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 
tatus: 48 MiniKQLCompileResults { CompiledProgram: "\037\026\nFlags\010Name\010Args\016Payload\022Parameter CreatedTimestamp\024InflyCount\030InflyVersion\030MessageCount\"RetentionBoundary\nShard\006\002\206\202\t\211\004\202\203\005@\206\205\014\207\203\010\207\203\006\207\203\010\207\203\006\207\203\010\207\203\004\026\032\036\"&*$SetResult\000\003?\002\nstate\t\211\004?\036\205\004?\036\203\014\020List$Truncated\203\004\030Member\000\t\211\026?&\203\005\004\200\205\014\203\004\203\004\203\004\203\004\203\004\203\004\026\032\036\"&*\213\006\203\010\203\010\203\004\213\006?\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?, \000\001\205\000\000\000\000\001:\000\000\000\000\000\000\000?\034\005?:\003?.\010\003?0\016\003?2\030\003?4\020\003?6\014\003?8\032?\013?B\t\351\000?<\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?n\003?p(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?>\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\204\003?\206\036QUEUE_ID_NUMBER\003\022\000\003?@\000\004\013?F?|?\222\003?D\007\377\377\377\377\004\003?H\000\003?J\000\003?L\000\003?N\000\006\006?R\003\203\014\000\003\203\014\000\003\203\014\000\007\003?T\000\377\007\003?(\000\002\001\000/" } } 2026-01-07T17:33:58.834123Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000101v0] compilation duration: 12ms 2026-01-07T17:33:58.834306Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_STATE_ID) has been prepared 2026-01-07T17:33:58.834318Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_STATE_ID) 2026-01-07T17:33:58.834383Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_STATE_ID). 
Mode: COMPILE_AND_EXEC 2026-01-07T17:33:58.834439Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391} 2026-01-07T17:33:58.834687Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\026\nFlags\010Name\010Args\016Payload\022Parameter CreatedTimestamp\024InflyCount\030InflyVersion\030MessageCount\"RetentionBoundary\nShard\006\002\206\202\t\211\004\202\203\005@\206\205\014\207\203\010\207\203\006\207\203\010\207\203\006\207\203\010\207\203\004\026\032\036\"&*$SetResult\000\003?\002\nstate\t\211\004?\036\205\004?\036\203\014\020List$Truncated\203\004\030Member\000\t\211\026?&\203\005\004\200\205\014\203\004\203\004\203\004\203\004\203\004\203\004\026\032\036\"&*\213\006\203\010\203\010\203\004\213\006?\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?, \000\001\205\000\000\000\000\001:\000\000\000\000\000\000\000?\034\005?:\003?.\010\003?0\016\003?2\030\003?4\020\003?6\014\003?8\032?\013?B\t\351\000?<\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?n\003?p(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?>\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\204\003?\206\036QUEUE_ID_NUMBER\003\022\000\003?@\000\004\013?F?|?\222\003?D\007\377\377\377\377\004\003?H\000\003?J\000\003?L\000\003?N\000\006\006?R\003\203\014\000\003\203\014\000\003\203\014\000\007\003?T\000\377\007\003?(\000\002\001\000/" } Params { Bin: "\037\000\005\205\004\203\010\203\010> TestYmqHttpProxy::TestListQueueTags [GOOD] >> Secret::DeactivatedQueryService >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> Secret::SimpleQueryService |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |85.3%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:26.854247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.854346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.854383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.854414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.854452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.854489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.854564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.854624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.855368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.855624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.931377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:26.931429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.949074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.949404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:26.949574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.969625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.969972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.970775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.970995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.973862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.974009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.975073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.975142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.975289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.975336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.975377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.975539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:27.125903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.126719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.126809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.126883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.126947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.126995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:27.127704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26-01-07T17:34:03.002845Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.002968Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:03.003050Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:03.003117Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:34:03.005302Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.005392Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:03.005487Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:03.007592Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.007657Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.007742Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:03.007827Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:03.008016Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:03.013965Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:03.014197Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:34:03.014683Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:03.014893Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 115964119151 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:03.014987Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:03.015373Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:03.015455Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:03.015772Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:03.015890Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:03.018132Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:03.018219Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:03.018535Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:03.018605Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [27:210:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:34:03.019121Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.019192Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:34:03.019395Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:03.019461Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:03.019528Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:03.019584Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:03.019643Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:34:03.019711Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:03.019774Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:34:03.019869Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:34:03.019984Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:03.020044Z 
node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:34:03.020100Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:34:03.020816Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:03.021007Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:03.021101Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:34:03.021169Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:34:03.021239Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:03.021365Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:34:03.024659Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:34:03.025332Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:34:03.026659Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [27:669:2658] Bootstrap 2026-01-07T17:34:03.028397Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [27:669:2658] Become StateWork (SchemeCache [27:674:2663]) 2026-01-07T17:34:03.032085Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:03.032646Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.032816Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-07T17:34:03.033513Z node 27 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-07T17:34:03.034530Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [27:669:2658] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:34:03.038215Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:03.038611Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:34:03.039296Z node 27 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |85.3%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2026-01-07T17:32:59.164973Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674022927236931:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:59.165592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:59.593901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:59.715992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:59.716076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:59.770304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:59.866783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:59.869157Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674022927236893:2081] 1767807179135629 != 1767807179135632 2026-01-07T17:32:59.893448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:00.056785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:00.056805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:00.056812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:00.056882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:00.194601Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:00.503306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:00.892120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:00.902304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:33:00.904809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:00.929053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:33:01.005316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.216115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.284021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-01-07T17:33:01.288817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.355810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-07T17:33:01.364402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.405142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.440445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.494656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.539199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.583934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:01.623348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.307964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040107107740:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.308116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.308603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040107107752:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.308645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674040107107753:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.308967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:03.312497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:03.335422Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674040107107756:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:33:03.420117Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674040107107807:3108] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:03.886439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.935726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:03.982187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.041549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... t new request from [b8eb:4769:97c:0:a0eb:4769:97c:0] database '/Root' stream 'testtopic' 2026-01-07T17:34:02.024396Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStream] requestId [f86352d6-85b197dc-aab8a8a5-b0d9c409] [auth] Authorized successfully 2026-01-07T17:34:02.024447Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStream] requestId [f86352d6-85b197dc-aab8a8a5-b0d9c409] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:34:02.025264Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7592674290281291954:2498], now have 1 active actors on pipe 2026-01-07T17:34:02.025328Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7592674290281291950:2494], now have 1 active actors on pipe 2026-01-07T17:34:02.025399Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037909] server connected, pipe [8:7592674290281291952:2496], now have 1 active actors on pipe 2026-01-07T17:34:02.025426Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037908] server connected, pipe [8:7592674290281291951:2495], now have 1 active actors on pipe 2026-01-07T17:34:02.025460Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037910] server connected, pipe [8:7592674290281291953:2497], now have 1 active actors on pipe 2026-01-07T17:34:02.026180Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7592674290281291950:2494] destroyed 2026-01-07T17:34:02.026212Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037908] server disconnected, pipe [8:7592674290281291951:2495] destroyed 2026-01-07T17:34:02.026234Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 
72075186224037909] server disconnected, pipe [8:7592674290281291952:2496] destroyed 2026-01-07T17:34:02.026255Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037910] server disconnected, pipe [8:7592674290281291953:2497] destroyed 2026-01-07T17:34:02.026273Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7592674290281291954:2498] destroyed 2026-01-07T17:34:02.026669Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [f86352d6-85b197dc-aab8a8a5-b0d9c409] reply ok 2026-01-07T17:34:02.026875Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:37884) <- (200 , 1672 bytes) 2026-01-07T17:34:02.026967Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:37884) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1767807242,"StorageLimitMb":0,"StreamName":"testtopic"}} 2026-01-07T17:34:02.080167Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:02.080212Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.080229Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.080252Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.080269Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-07T17:34:02.080328Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:34:02.080339Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 
0, PendingWrites: 0 2026-01-07T17:34:02.080348Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.080359Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.080367Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-07T17:34:02.085317Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:34:02.085345Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.085356Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.085389Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.085401Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-07T17:34:02.085460Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:34:02.085471Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.085481Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.085489Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.085495Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-07T17:34:02.088314Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:02.088347Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.088361Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.088382Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.088395Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-07T17:34:02.180520Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:02.180533Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:34:02.180564Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.180564Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.180578Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process 
user action and tx pending commits 2026-01-07T17:34:02.180579Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.180600Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.180600Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.180616Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-07T17:34:02.180617Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-07T17:34:02.185629Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:34:02.185634Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:34:02.185669Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.185674Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.185685Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.185688Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.185704Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.185709Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.185720Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-07T17:34:02.185723Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-07T17:34:02.188683Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:02.188719Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.188736Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:02.188755Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:02.188771Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> Secret::Deactivated |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |85.3%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:22.662513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:22.662592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:22.662623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:22.662657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:22.662693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:22.662725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:22.662777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:22.662841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:22.663575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:22.663792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:22.758682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:33:22.758745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:22.772526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:22.772854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:22.773056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:22.779633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:22.779975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:22.780769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:22.781043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:22.783753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:22.783946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:22.785041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:22.785109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:22.785230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:22.785274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:22.785319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:22.785463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:22.909062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.910993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:22.911096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:03.759964Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:03.760003Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:03.760043Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:34:03.760086Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:34:03.760176Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:34:03.761547Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1841 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:34:03.761604Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:34:03.761749Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1841 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:34:03.761875Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1841 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:34:03.763345Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 708 RawX2: 120259086981 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:34:03.763395Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:34:03.763516Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 708 RawX2: 120259086981 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:34:03.763577Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:34:03.763681Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 708 RawX2: 120259086981 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:34:03.763752Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:03.763797Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.763838Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:34:03.763889Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:34:03.766212Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:34:03.766287Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:34:03.767419Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.767524Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.767760Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:03.767800Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:34:03.767909Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:03.767942Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:03.767978Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:03.768004Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:03.768039Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is 
published: true 2026-01-07T17:34:03.768104Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:734:2711] message: TxId: 101 2026-01-07T17:34:03.768148Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:03.768202Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:34:03.768237Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:34:03.768369Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:34:03.770310Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:34:03.770374Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:735:2712] TestWaitNotification: OK eventTxId 101 2026-01-07T17:34:03.770991Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:03.771281Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 335us result status StatusSuccess 2026-01-07T17:34:03.771939Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2026-01-07T17:33:02.294559Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674033996433056:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:02.294601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:02.365802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:33:02.686522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:02.686686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:02.689747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:02.765021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:02.866022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:03.079926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:03.094695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T17:33:03.094723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:03.094729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:03.103455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:03.325163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:03.455763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:03.472548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:03.787892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:33:03.796061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:33:03.798233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:33:03.927459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.090381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.151250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-07T17:33:04.156736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.283832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.331001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.386992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.428687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.474420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.512377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:04.570048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:06.550114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051176303865:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.553020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051176303860:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.553132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.553453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674051176303896:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.553518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:06.555821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:06.577816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674051176303874:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:33:06.659693Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674051176303927:3111] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:07.076230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.133498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.168776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:07.207811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemes ... 
ember { Name: "ReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "VisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "inflyCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "inflyVersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "messageCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "readOffset" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } Value { Struct { Optional { Optional { Uint64: 1767807242991 } } } Struct { Optional { } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Uint64: 0 } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Optional { Uint64: 0 } } } } } } 2026-01-07T17:34:03.323708Z node 7 :SQS DEBUG: queue_leader.cpp:2148: Infly version for shard [cloud4/000000000000000101v0/0]: 0 2026-01-07T17:34:03.330448Z node 7 :SQS TRACE: executor.cpp:286: Request [da1f41f0-4f52d207-86582b61-6efba399] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715715 Step: 1767807243377 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { 
Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-01-07T17:34:03.330471Z node 7 :SQS DEBUG: executor.cpp:287: Request [da1f41f0-4f52d207-86582b61-6efba399] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 9ms 2026-01-07T17:34:03.330951Z node 7 :SQS TRACE: executor.cpp:325: Request [da1f41f0-4f52d207-86582b61-6efba399] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715715 Step: 1767807243377 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-01-07T17:34:03.331031Z node 7 :SQS TRACE: executor.cpp:327: Request [da1f41f0-4f52d207-86582b61-6efba399] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2026-01-07T17:34:03.331160Z node 7 :SQS DEBUG: executor.cpp:401: Request [da1f41f0-4f52d207-86582b61-6efba399] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue 
[cloud4/000000000000000301v0] execution duration: 11ms 2026-01-07T17:34:03.331219Z node 7 :SQS DEBUG: queue_leader.cpp:2038: Created new Deduplication cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7592674295548067590:3917] 2026-01-07T17:34:03.331245Z node 7 :SQS DEBUG: queue_leader.cpp:2038: Created new Reads cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7592674295548067591:3918] 2026-01-07T17:34:03.331271Z node 7 :SQS INFO: fifo_cleanup.cpp:31: Request [49a0c17-d34420ac-f3a0b584-85a0b4c9] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2026-01-07T17:34:03.331275Z node 7 :SQS DEBUG: queue_leader.cpp:2050: Created new retention actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7592674295548067592:3919] 2026-01-07T17:34:03.331285Z node 7 :SQS INFO: fifo_cleanup.cpp:31: Request [84d23e90-7d999f69-847adb24-d93b4bf7] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2026-01-07T17:34:03.331301Z node 7 :SQS DEBUG: queue_leader.cpp:2054: Created new purge actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7592674295548067593:3920] 2026-01-07T17:34:03.331303Z node 7 :SQS INFO: retention.cpp:30: Request [1c07a4b1-41c4c66a-a8997946-8d67313c] Bootstrap retention actor for queue [cloud4/000000000000000301v0] 2026-01-07T17:34:03.331311Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [da1f41f0-4f52d207-86582b61-6efba399] Sending executed reply 2026-01-07T17:34:03.331321Z node 7 :SQS INFO: purge.cpp:35: Request [5348d01e-c6d7b8e-8e3fbe8c-f837b15a] Create purge actor for queue /Root/SQS/cloud4/000000000000000301v0 2026-01-07T17:34:03.331575Z node 7 :SQS DEBUG: action.h:627: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] Get configuration duration: 17ms 2026-01-07T17:34:03.331592Z node 7 :SQS TRACE: action.h:647: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] Got configuration. Root url: http://ghrun-z2c6jslqle.auto.internal:8771, Shards: 1, Fail: 0 2026-01-07T17:34:03.331608Z node 7 :SQS TRACE: action.h:427: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] DoRoutine 2026-01-07T17:34:03.331662Z node 7 :SQS TRACE: action.h:264: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] SendReplyAndDie from action actor { ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } } 2026-01-07T17:34:03.331735Z node 7 :SQS TRACE: proxy_service.h:35: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] Sending sqs response: { ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-01-07T17:34:03.331801Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7592674295548067567:3898]. 
Found: 1 2026-01-07T17:34:03.331825Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-01-07T17:34:03.331871Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7592674295548067566:2552]: ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-01-07T17:34:03.331960Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] HandleResponse: { ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2026-01-07T17:34:03.332004Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [e2bc44e1-6241830-8fd1bb51-2166fe83] Sending reply from proxy actor: { ListQueueTags { RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" } RequestId: "e2bc44e1-6241830-8fd1bb51-2166fe83" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-01-07T17:34:03.332110Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [e2bc44e1-6241830-8fd1bb51-2166fe83] Got succesfult GRPC response. 2026-01-07T17:34:03.332153Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [e2bc44e1-6241830-8fd1bb51-2166fe83] reply ok 2026-01-07T17:34:03.332254Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [e2bc44e1-6241830-8fd1bb51-2166fe83] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 153 SourceAddress: f8a4:5c11:557c:0:e0a4:5c11:557c:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-01-07T17:34:03.332334Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:48010) <- (200 , 2 bytes) 2026-01-07T17:34:03.332438Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:48010) connection closed |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpRboPg::Filter >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> KqpRboYql::Filter+ColumnStore >> KqpRboYql::ConstantFolding+ColumnStore >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> 
KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition >> KqpRboPg::Select >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition |85.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateWithoutCredentials >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> HttpRequest::Analyze >> HttpRequest::ProbeBaseStatsServerless >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 |85.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::Status >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap |85.4%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::ServerlessGlobalIndex >> BasicStatistics::NotFullStatisticsDatashard >> HttpRequest::AnalyzeServerless >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot |85.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling >> BasicStatistics::TwoServerlessTwoSharedDbs |85.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AllTypesColumns >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge |85.4%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:05.076224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:05.076325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.076365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:05.076406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:05.076442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:05.076471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:05.076529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.076621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:05.077523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:05.077827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2026-01-07T17:34:05.168977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.169039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.189538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.189920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.190154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.206638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.207058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.207871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.208145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.211230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.211423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.212565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.212626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.212744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.212808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.212915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.213183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.366433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.367946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.368613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eshard: 72057594046678944, cookie: 281474976710672 2026-01-07T17:34:08.670408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-07T17:34:08.670482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [3:216:2216] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-07T17:34:08.670524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [3:216:2216] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-07T17:34:08.676698Z node 3 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-07T17:34:08.686529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:08.686721Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.686911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:08.686957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:34:08.687142Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:34:08.687214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:08.689066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:08.689224Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:34:08.689400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.689456Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:08.689497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:08.689535Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-07T17:34:08.691078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.691145Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:08.691187Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:08.692570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.692614Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.692670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:08.692724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:08.692848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:08.694166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:08.694285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:34:08.694552Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:08.694662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 12884904046 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:08.694709Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:08.695004Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:08.695068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:08.695245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2026-01-07T17:34:08.695312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:08.696829Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:08.696878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:08.697094Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:08.697142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:34:08.697437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.697487Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:34:08.697589Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:08.697631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:08.697670Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:08.697705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:08.697743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:34:08.697787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:08.697829Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:34:08.697866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:34:08.697932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:08.697974Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:34:08.698012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:34:08.698532Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:08.698631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:08.698676Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:34:08.698724Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:34:08.698767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:08.698854Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:34:08.701216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:34:08.701636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:04.118340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:04.118477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.118544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:04.118590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:04.118628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:04.118670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:04.118746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.118831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:34:04.119793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:04.120286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:04.215138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:04.215217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:04.231195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:04.231660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:04.231884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:04.238692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:04.239054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:04.239958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:04.240233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:04.243053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:04.243242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:04.244457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:04.244520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:04.244674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:04.244727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:04.244835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:04.245060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:04.407002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.408953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.409031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.409107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.409205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.409297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Step: 250 2026-01-07T17:34:08.604615Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:08.604662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.604697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:08.604818Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:34:08.605033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:08.608115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.608276Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:08.608311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:08.608527Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:08.608578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:08.608743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:08.608784Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:08.608871Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:08.608900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:08.608951Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:08.608988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:08.609032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:08.609071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:08.609115Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:08.609156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:08.609295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:08.609342Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-07T17:34:08.609377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:08.610752Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:08.610859Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:08.610915Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:08.610964Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:08.611009Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:08.611100Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-07T17:34:08.611145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:813:2779] 2026-01-07T17:34:08.612903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:34:08.613018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:08.613058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1032:2945] TestWaitNotification: OK eventTxId 105 2026-01-07T17:34:08.613718Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:34:08.613943Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 247us result status StatusSuccess 2026-01-07T17:34:08.614600Z node 3 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" 
ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:04.735223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:04.735319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.735362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:04.735412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:04.735455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:04.735489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:04.735564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.735634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:04.736531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:04.736830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:04.825453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:04.825520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:04.839924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:04.840546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:04.840748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:04.848474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:04.848861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:04.849618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:04.849912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:04.853983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:04.854203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:04.855495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:04.855566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:04.855715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:04.855772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:04.855873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:04.856077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:04.994540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:34:04.995484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.995988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:04.996583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
X_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-07T17:34:09.156031Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.156071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:09.156207Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:34:09.156331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:34:09.156379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.158562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.159175Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:09.159215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:09.159325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:09.159498Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:09.159533Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:34:09.159569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:34:09.159775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.159818Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:34:09.159922Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.159955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.159988Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.160013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.160044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:34:09.160088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.160134Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:34:09.160172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:34:09.160327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:09.160376Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:34:09.160410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:34:09.160433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:34:09.161204Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.161261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.161287Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.161342Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:34:09.161374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:34:09.162104Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.162164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.162194Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.162219Z 
node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:34:09.162241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.162301Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:34:09.164641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:34:09.165299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:34:09.165451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:34:09.165482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:34:09.165741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:34:09.165815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.165843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:993:2912] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } TestModificationResults wait txId: 105 2026-01-07T17:34:09.169130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:09.169309Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.169464Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), at schemeshard: 72057594046678944 2026-01-07T17:34:09.171117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-07T17:34:09.171306Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:09.171516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:09.171545Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:09.171826Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:09.171902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.171935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1000:2919] TestWaitNotification: OK eventTxId 105 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> AsyncIndexChangeCollector::DeleteNothing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:05.257793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:05.257894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.257937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:05.257976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:05.258034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:05.258064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:05.258141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.258217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:05.259160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:05.259556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.342312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.342368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.357189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.357481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.357668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.364108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.364458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.365230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.365490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.368362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.368551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.369783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.369844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.369944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.369999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.370122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.370303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.502912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.503950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.504995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.505070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
, tablet: 72075186233409548, partId: 0 2026-01-07T17:34:09.255585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-07T17:34:09.255671Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-07T17:34:09.255773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.255824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:09.256019Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:34:09.256202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:34:09.256283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.259059Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.259655Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:09.259696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:09.259845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:09.260035Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:09.260073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:34:09.260113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:34:09.260381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.260430Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:34:09.260551Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.260611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.260657Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.260701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.260740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:34:09.260786Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.260831Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:34:09.260871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:34:09.261172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:09.261238Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:34:09.261276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:34:09.261310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:34:09.262409Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.262499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.262548Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.262595Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:34:09.262644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:34:09.263750Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.263830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.263860Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.263893Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:34:09.263926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.264008Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:34:09.267998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:34:09.269325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:34:09.269530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:34:09.269572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:34:09.269928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:34:09.270003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.270034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:993:2912] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2026-01-07T17:34:09.272632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:09.272827Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.273041Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2026-01-07T17:34:09.274768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-07T17:34:09.275018Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:09.275261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:09.275297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:09.275603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:09.275669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.275699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1000:2919] TestWaitNotification: OK eventTxId 105 >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:04.966760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:04.966895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.966952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:04.966998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:04.967038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:04.967072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:04.967161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.967256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:04.968239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:04.968548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.062055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.062119Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.077955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.078278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.078482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.085156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.085456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.086062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.086270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.088584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.088742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.089758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.089803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.089913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.089952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.090028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.090206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.297062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298042Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.298999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.299098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.299169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
01-07T17:34:09.330181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-07T17:34:09.330254Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-07T17:34:09.330401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.330446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:09.330621Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:34:09.330756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:34:09.330803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.332610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.333111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:09.333155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:09.333294Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:09.333466Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:09.333500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:34:09.333536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:34:09.334171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.334221Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:34:09.334336Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.334377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.334420Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:34:09.334456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.334498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:34:09.334540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:34:09.334585Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:34:09.334622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:34:09.334777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:09.334822Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:34:09.334867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:34:09.334919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:34:09.336492Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.336579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.336621Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.336662Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:34:09.336706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:34:09.337812Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.337894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 
PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:09.337927Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:09.337956Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:34:09.337992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.338073Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:34:09.341351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:34:09.341425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:34:09.341575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:34:09.341610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:34:09.341900Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:34:09.341990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.342030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:979:2908] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } TestModificationResults wait txId: 105 2026-01-07T17:34:09.344910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:09.345191Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.345395Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid number of child partitions: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:09.347488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid number of child partitions: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-07T17:34:09.347744Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid number of child partitions: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:09.348064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:09.348107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:09.348453Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:09.348540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.348581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:986:2915] TestWaitNotification: OK eventTxId 105 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> KqpRboPg::Filter [GOOD] >> KqpRboPg::OrderBy >> AsyncIndexChangeCollector::UpsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:05.039920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:05.040027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.040074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:05.040120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:05.040162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:05.040197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:05.040264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.040355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:34:05.041354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:05.041681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.120744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.120811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.135177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.135496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.135669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.142752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.143100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.143796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.144048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.146511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.146679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.147793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.147852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.147964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.148031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.148131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.148331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.282848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.286747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.286904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.287853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-07T17:34:09.494660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:09.508898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:09.509159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:09.509194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:09.509518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:09.509552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-07T17:34:09.509595Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:09.566032Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:09.566199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:09.566265Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-07T17:34:09.566325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:09.585205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-07T17:34:09.585369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:09.585456Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:09.585523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.585569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:09.585762Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:34:09.585972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.594448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.594935Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:09.594998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:09.595348Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:09.595403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:09.595523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.595581Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:09.595714Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:09.595748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:09.595785Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:09.595823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:09.595868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:09.595921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:09.595970Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:09.596007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:09.596177Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:09.596226Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-07T17:34:09.596271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:09.598654Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:09.598872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:09.598951Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:09.599029Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:09.599127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:09.599240Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-07T17:34:09.599290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:813:2779] 2026-01-07T17:34:09.601918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:34:09.602023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.602068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1028:2946] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-07T17:34:09.606419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:09.606692Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:34:09.606943Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-07T17:34:09.609327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:09.609659Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:34:09.610006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:34:09.610054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:34:09.610537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:34:09.610641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:34:09.610683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1079:2981] TestWaitNotification: OK eventTxId 106 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-07T17:32:26.373027Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1767807146372986 2026-01-07T17:32:26.799152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673878963268378:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.801641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:26.851937Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673881244615370:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:26.852054Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:26.857501Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:26.881260Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:26.881965Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:27.089530Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:27.150450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:27.181260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-07T17:32:27.181351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:27.181755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:27.181852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:27.195374Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:32:27.195519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:27.197007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:27.322882Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:27.362715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:27.408035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:27.436149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003921/r3tmp/yandex4X2qLD.tmp 2026-01-07T17:32:27.436178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003921/r3tmp/yandex4X2qLD.tmp 2026-01-07T17:32:27.436335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003921/r3tmp/yandex4X2qLD.tmp 2026-01-07T17:32:27.436405Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:27.489165Z INFO: TTestServer started on Port 15711 GrpcPort 63085 2026-01-07T17:32:27.843899Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:27.905957Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:63085 2026-01-07T17:32:27.933000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:28.184330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:32:31.197896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673900438106153:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.198006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.199247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673900438106166:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.199322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673900438106167:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.199491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:31.202995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:31.281118Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673900438106170:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:32:31.625137Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673900438106260:2969] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:31.670520Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592673900438106273:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:32:31.671001Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWZhNzczZWItZmE4M2FhMmItZTkwNTVmM2YtZTc2N2ZlZmI=, ActorId: [1:7592673900438106151:2330], ActorState: ExecuteState, LegacyTraceId: 01kecr8v092chw1mmwyjnm5t51, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:32:31.675710Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592673902719452102:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:32:31.677309Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NDZkNjRhMzQtMzIzODhhZWEtMTc3NDY0MmMtZmRhZWE3Y2Y=, ActorId: [2:7592673902719452069:2301], ActorState: ExecuteState, LegacyTraceId: 01kecr8v4s4xqemjnbb0q1ky22, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:32:31.682766Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:32:31.682991Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta ... Id: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 1 WriteTimestampMS: 1767807242111 CreateTimestampMS: 1767807242103 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 2 WriteTimestampMS: 1767807242112 CreateTimestampMS: 1767807242103 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1767807242112 CreateTimestampMS: 1767807242103 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 4 WriteTimestampMS: 1767807242112 CreateTimestampMS: 1767807242104 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 1 SizeLag: 18446744073709551539 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-07T17:34:07.148372Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-07T17:34:07.148430Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 4117c1f1-9e66ae47-592c84e-ce072fec has messages 1 2026-01-07T17:34:07.148581Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 read done: guid# 4117c1f1-9e66ae47-592c84e-ce072fec, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 432 2026-01-07T17:34:07.148619Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 response to read: guid# 4117c1f1-9e66ae47-592c84e-ce072fec 2026-01-07T17:34:07.148836Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 Process answer. Aval parts: 0 2026-01-07T17:34:07.149727Z :DEBUG: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:34:07.153090Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-01-07T17:34:07.153092Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 grpc read done: success# 1, data# { read { } } 2026-01-07T17:34:07.153257Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T17:34:07.153305Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-07T17:34:07.153327Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2026-01-07T17:34:07.153347Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (3-3) 2026-01-07T17:34:07.153005Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:07.153287Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 got read request: guid# 5e440df2-41275a10-180c76e3-bafb146e 2026-01-07T17:34:07.153039Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.153389Z :DEBUG: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] [null] The application data is transferred to the client. 
Number of messages 4, size 14 bytes 2026-01-07T17:34:07.153058Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:07.153076Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.153089Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:34:07.153578Z :INFO: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] Closing read session. Close timeout: 0.000000s 2026-01-07T17:34:07.153623Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-07T17:34:07.153675Z :INFO: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 32 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:34:07.153759Z :NOTICE: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:34:07.153798Z :DEBUG: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] [null] Abort session to cluster 2026-01-07T17:34:07.154287Z :NOTICE: [/Root] [/Root] [bf6500c9-5cdb3e28-c9af33e6-3d7e834a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:34:07.154477Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0] Write session: close. Timeout = 0 ms 2026-01-07T17:34:07.154513Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0] Write session will now close 2026-01-07T17:34:07.154557Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0] Write session: aborting 2026-01-07T17:34:07.154903Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:34:07.154967Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0] Write session: destroy 2026-01-07T17:34:07.156838Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0 grpc closed 2026-01-07T17:34:07.156850Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 grpc read done: success# 0, data# { } 2026-01-07T17:34:07.156866Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 grpc read failed 2026-01-07T17:34:07.156893Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|62cf809b-283cdc08-96d779b0-b560c780_0 is DEAD 2026-01-07T17:34:07.156893Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 grpc closed 2026-01-07T17:34:07.156940Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_98577871366648750_v1 is DEAD 2026-01-07T17:34:07.158411Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7592674289486521623:2474] destroyed 2026-01-07T17:34:07.158468Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:34:07.158494Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:07.158509Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.158521Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:07.158539Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.158557Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:34:07.158594Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_98577871366648750_v1 2026-01-07T17:34:07.158625Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7592674315256325677:2528] destroyed 2026-01-07T17:34:07.158666Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_98577871366648750_v1 2026-01-07T17:34:07.157842Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:34:07.157957Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592674315256325674:2525] disconnected. 2026-01-07T17:34:07.157975Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592674315256325674:2525] disconnected; active server actors: 1 2026-01-07T17:34:07.157997Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592674315256325674:2525] client user disconnected session shared/user_11_1_98577871366648750_v1 2026-01-07T17:34:07.253839Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:07.253892Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.253917Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:07.253945Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:07.253965Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:34:07.884461Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7592674315256325712:2530] TxId: 281474976715683. Ctx: { TraceId: 01kecrbs2q3a27xv6j9dh974ky, Database: /Root, SessionId: ydb://session/3?node_id=11&id=NzY3ZmMzM2YtMWMxNjRjZmItMTI5NWYzOTQtMWYwNWEwZjg=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-07T17:34:07.884592Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7592674315256325717:2530], TxId: 281474976715683, task: 3. Ctx: { TraceId : 01kecrbs2q3a27xv6j9dh974ky. CheckpointId : . 
RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=NzY3ZmMzM2YtMWMxNjRjZmItMTI5NWYzOTQtMWYwNWEwZjg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7592674315256325712:2530], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |85.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> KqpRboPg::Select [GOOD] >> KqpRboPg::TestCrossInnerJoin+ColumnStore >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:04.959969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:04.960062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.960097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:04.960138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:04.960191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:04.960222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:04.960311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.960371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:04.961162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:04.961396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.046614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.046660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.060266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2026-01-07T17:34:05.060586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.060760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.067082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.067458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.068165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.068405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.071094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.071258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.072288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.072343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.072482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.072523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.072611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.072777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.183512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.187994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.188037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1-07T17:34:10.245033Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:10.245177Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-07T17:34:10.245453Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:10.297777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-07T17:34:10.298047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:10.298171Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:10.298256Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.298316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:10.298546Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:34:10.298799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:10.303760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.304214Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:10.304279Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:10.304674Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:10.304736Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:10.305531Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.305601Z node 4 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:10.305765Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.305814Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.305868Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.305916Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.305969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:10.306039Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.306107Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:10.306162Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:10.306361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:10.306419Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-07T17:34:10.306471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:10.308364Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.308517Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.308582Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:10.308644Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:10.308704Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:10.308820Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-07T17:34:10.308896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:822:2788] 2026-01-07T17:34:10.312970Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:34:10.313166Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:10.313232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1039:2957] TestWaitNotification: OK eventTxId 105 2026-01-07T17:34:10.313966Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:34:10.314279Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 349us result status StatusSuccess 2026-01-07T17:34:10.315309Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 4 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 4 NextPartitionId: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:05.267615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:05.267713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.267759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:05.267800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:05.267841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:05.267872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:05.267936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.268033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:05.269055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:05.269364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.366744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.366802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.383067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.383434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.383639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.391114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.391524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.392205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.392453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.395268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.395443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.396571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.396636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.396768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.396831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.396957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.397147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.538107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.541735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.541928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.542967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
chemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.141895Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:10.141933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:10.142227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:10.142263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:10.142900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.142956Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:10.143068Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.143115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.143156Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.143189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.143246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:10.143290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.143338Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:10.143370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:10.143517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:10.143567Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-07T17:34:10.143605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:10.147934Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.148034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 
Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.148071Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:10.148122Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:10.148197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:10.148309Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:34:10.152232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:10.152549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:10.152598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:10.153013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:10.153105Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:10.153146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1070:2979] TestWaitNotification: OK eventTxId 105 2026-01-07T17:34:10.717708Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:34:10.717995Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 340us result status StatusSuccess 2026-01-07T17:34:10.718731Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } 
Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2026-01-07T17:34:10.722748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:10.722991Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.723163Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 2026-01-07T17:34:10.725605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." 
TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:10.725886Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:34:10.726260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:34:10.726309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:34:10.726796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:34:10.726908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:34:10.726952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1085:2993] TestWaitNotification: OK eventTxId 106 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:04.927953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:04.928047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.928077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:04.928108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:04.928136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:04.928173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:04.928228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:04.928292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:34:04.929020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:04.929242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.007444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.007508Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.026377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.026783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.027011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.035918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.036346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.037169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.037454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.041189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.041399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.042683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.042754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.042885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.042954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.043059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.043294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.187980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.188984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.189957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:10.320764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 1200, at tablet: 72057594046678944 2026-01-07T17:34:10.320822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:10.330708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-07T17:34:10.330857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1200 2026-01-07T17:34:10.330955Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1200 2026-01-07T17:34:10.331027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.331074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:10.331239Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:34:10.331433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:10.335297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.335735Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:10.335779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:10.336040Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:10.336077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:10.336445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.336508Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:10.336639Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.336682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.336731Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:10.336769Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.336813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:10.336861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:10.336908Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:10.336966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:10.337119Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:10.337171Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-07T17:34:10.337214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:10.338113Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.338198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:10.338242Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:10.338284Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:10.338335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:10.338431Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:34:10.350312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:10.350643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 
2026-01-07T17:34:10.350700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:10.351273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:10.351405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:10.351456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1078:2985] TestWaitNotification: OK eventTxId 105 2026-01-07T17:34:10.815544Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:34:10.815911Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 428us result status StatusSuccess 2026-01-07T17:34:10.816626Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 
BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:05.247559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:05.247649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.247701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:05.247732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:05.247775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:05.247808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:05.247887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:05.247941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:34:05.248702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:05.249233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:05.326865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:05.326944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:05.343094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:05.343387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:05.343584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:05.351259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:05.351736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:05.352559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:05.352842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:05.355537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.355682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:05.356594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:05.356657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:05.356747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:05.356782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:05.356865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:05.357095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:05.471590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.472513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.472776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.472853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.472913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:05.473571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-07T17:34:10.918744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:10.920907Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:34:10.921209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:34:10.921262Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:34:10.921719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:10.921776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-07T17:34:10.921826Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:34:10.978843Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:10.979024Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:10.979095Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-07T17:34:10.979155Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:34:10.996600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-07T17:34:10.996799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:10.996891Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:34:10.996979Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:34:10.997030Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:34:10.997225Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:34:10.997440Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:11.000581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:11.001630Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:11.001687Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:11.001999Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:11.002055Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:34:11.002756Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:34:11.002819Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:34:11.002963Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:11.003004Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:11.003056Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:34:11.003097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:11.003145Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:34:11.003200Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:34:11.003249Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:34:11.003294Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:34:11.003450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:34:11.003494Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-07T17:34:11.003536Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-07T17:34:11.005200Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:11.005319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:34:11.005385Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:34:11.005431Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:34:11.005482Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:11.005584Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-07T17:34:11.005631Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:822:2788] 2026-01-07T17:34:11.011254Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:34:11.011411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:34:11.011468Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1039:2957] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-07T17:34:11.015206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:11.015491Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:34:11.015711Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-07T17:34:11.018359Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:11.018655Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:34:11.019002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:34:11.019054Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:34:11.019496Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:34:11.019614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:34:11.019661Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1092:2994] TestWaitNotification: OK eventTxId 106 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> CdcStreamChangeCollector::InsertSingleRow |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |85.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TBackupCollectionTests::DisallowedPath >> KqpRboYql::ConstantFolding+ColumnStore [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> 
TBackupCollectionTests::HiddenByFeatureFlag >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> YdbIndexTable::OnlineBuild ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-07T17:34:14.134610Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.137707Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.138012Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:14.138070Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.138127Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:14.138900Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2253], now have 1 active actors on pipe 2026-01-07T17:34:14.138968Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.158810Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.159041Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.159934Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.160129Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:14.160569Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.161051Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2224] 2026-01-07T17:34:14.163517Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.163577Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2026-01-07T17:34:14.163621Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2224] 2026-01-07T17:34:14.163671Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.163743Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.163813Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:14.163869Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:14.163924Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.163962Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.164001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.164036Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:14.164163Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.164436Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.164952Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2258], now have 1 active actors on pipe 2026-01-07T17:34:14.213900Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.217354Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.217694Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-07T17:34:14.217755Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.217805Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-07T17:34:14.218486Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2026-01-07T17:34:14.218595Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.220723Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:14.220841Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.221609Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:14.221739Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:14.222050Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.222274Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:409:2327] 2026-01-07T17:34:14.224243Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.224299Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2026-01-07T17:34:14.224342Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:409:2327] 2026-01-07T17:34:14.224389Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.224443Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.224479Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:14.224514Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928138][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:34:14.224551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.224588Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928138][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.224626Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.224658Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-07T17:34:14.224765Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.224976Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.225422Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2026-01-07T17:34:14.244649Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.247657Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.247903Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:14.247959Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.248011Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:14.248643Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:461:2397], now have 1 active actors on pipe 2026-01-07T17:34:14.248780Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.250992Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.251123Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.251927Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 3 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.252083Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:14.252401Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.252659Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:469:2368] 2026-01-07T17:34:14.254440Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.254498Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:14.254541Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:469:2368] 2026-01-07T17:34:14.254590Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.254642Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.254683Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:14.254717Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:14.254750Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.254785Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.254823Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.254853Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:14.254963Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:14.255217Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.255708Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:472:2402], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2026-01-07T17:34:14.263801Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:479:2405], now have 1 active actors on pipe 2026-01-07T17:34:14.264536Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:482:2406], now have 1 active actors on pipe 2026-01-07T17:34:14.264795Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:483:2406], now have 1 active actors on pipe 2026-01-07T17:34:14.265552Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:479:2405] destroyed 2026-01-07T17:34:14.266137Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [2:482:2406] destroyed 2026-01-07T17:34:14.266223Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:483:2406] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> KqpRboPg::OrderBy [GOOD] >> KqpRboPg::LeftJoinToKqpOpJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-07T17:34:14.009643Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle 
TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.013451Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.013748Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:14.013811Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.013889Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:14.014544Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2026-01-07T17:34:14.014654Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.032449Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.032633Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.033456Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.033634Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:14.033989Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.034398Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2026-01-07T17:34:14.036632Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.036680Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:14.036724Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2026-01-07T17:34:14.036772Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.036829Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.036886Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:14.036958Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:14.037007Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.037042Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.037078Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.037115Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:14.037215Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.037456Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.037874Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2026-01-07T17:34:14.082988Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.086936Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.087255Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:14.087310Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.087361Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:14.087996Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2026-01-07T17:34:14.088104Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.090188Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.090318Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.091066Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.091194Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:14.091481Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.091701Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:409:2327] 2026-01-07T17:34:14.093723Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.093772Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:14.093814Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:409:2327] 2026-01-07T17:34:14.093860Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.093909Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.093945Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:14.093974Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:14.094005Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.094034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.094067Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.094100Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:14.094200Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:14.094398Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.094814Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2026-01-07T17:34:14.095981Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-07T17:34:14.096340Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:420:2365], now have 1 active actors on pipe 2026-01-07T17:34:14.096748Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:418:2364] destroyed 2026-01-07T17:34:14.097112Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:420:2365] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-07T17:34:14.236396Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.240216Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.240556Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:14.240645Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.240714Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:14.241508Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2026-01-07T17:34:14.241636Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.258628Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.258831Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.259556Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.259750Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:14.260144Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.260512Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2026-01-07T17:34:14.262547Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.262594Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:14.262634Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2026-01-07T17:34:14.262680Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.262748Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.262807Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:14.262851Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:14.262901Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.262925Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.262955Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.262989Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:14.263083Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.263270Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.263644Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2026-01-07T17:34:14.326151Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.329785Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.330117Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:14.330187Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.330240Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:14.330912Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2026-01-07T17:34:14.331025Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.333138Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.333268Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.334018Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:14.334155Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:14.334463Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.334702Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:409:2327] 2026-01-07T17:34:14.336694Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.336750Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:14.336794Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:409:2327] 2026-01-07T17:34:14.336846Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.336907Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.337037Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:14.337075Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:14.337111Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.337147Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.337185Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.337220Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:14.337331Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:14.337570Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.338023Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2026-01-07T17:34:14.339250Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-07T17:34:14.339688Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:14.339787Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:420:2365], now have 1 active actors on pipe 2026-01-07T17:34:14.340149Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:14.340374Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:418:2364] destroyed 2026-01-07T17:34:14.340719Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:420:2365] destroyed 2026-01-07T17:34:14.869861Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.873604Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.873986Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:14.874059Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.874123Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01 ... termark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:15.060661Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:535:2443], now have 1 active actors on pipe 2026-01-07T17:34:15.060770Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:15.072787Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:543:2450], now have 1 active actors on pipe 2026-01-07T17:34:15.100635Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:15.103070Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:15.104283Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:15.104341Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:15.104460Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:15.104777Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:15.105054Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:599:2453] 2026-01-07T17:34:15.106973Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:15.108471Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:15.108734Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:15.108852Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-07T17:34:15.109214Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:15.109296Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:15.109525Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:15.109573Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:15.109612Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:15.109652Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:15.109756Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:15.109974Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:15.110017Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:15.110058Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-07T17:34:15.110100Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:599:2453] 2026-01-07T17:34:15.110153Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:15.110210Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:15.110256Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:15.110292Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:15.110330Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:15.110366Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:15.110409Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:15.110444Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:15.110535Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:15.110746Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:15.111593Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:534:2443] destroyed 2026-01-07T17:34:15.111654Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:533:2442] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 
0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 91 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 91 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-07T17:34:14.241205Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.247270Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.247609Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:14.247682Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.247776Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:14.248441Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-01-07T17:34:14.248548Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.265881Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.266022Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.266666Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.266806Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:14.267146Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.267481Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2026-01-07T17:34:14.269209Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.269277Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:14.269328Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-01-07T17:34:14.269381Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.269445Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.269534Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:14.269576Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:14.269631Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.269666Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.269704Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.269743Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:14.269840Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.270027Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.270460Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-01-07T17:34:14.332700Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.336816Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.337169Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-07T17:34:14.337235Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.337299Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-07T17:34:14.338122Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:398:2354], now have 1 active actors on pipe 2026-01-07T17:34:14.338202Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.340657Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.340800Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.341594Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:14.341745Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:14.342109Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:14.342341Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:406:2325] 2026-01-07T17:34:14.344496Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:14.344556Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-07T17:34:14.344602Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:406:2325] 2026-01-07T17:34:14.344655Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:14.344729Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:14.344771Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:14.344811Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:14.344849Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.344883Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:14.344920Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:14.344978Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-07T17:34:14.345094Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:14.345320Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:14.345842Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:409:2359], now have 1 active actors on pipe 2026-01-07T17:34:14.363828Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:14.367384Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:14.367773Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-07T17:34:14.367842Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.367905Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-07T17:34:14.368875Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:458:2395], now have 1 active actors on pipe 2026-01-07T17:34:14.368976Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:14.371855Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:14.372017Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:14.372715Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... ][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:15.409564Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:15.409597Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:15.409682Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:15.409898Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:15.410369Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:528:2440], now have 1 active actors on pipe 2026-01-07T17:34:15.411875Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:534:2443], now have 1 active actors on pipe 2026-01-07T17:34:15.412082Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2026-01-07T17:34:15.412253Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:15.412524Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2026-01-07T17:34:15.412638Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:15.412792Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:34:15.423758Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:544:2451], now have 1 active actors on pipe 2026-01-07T17:34:15.460599Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:15.462553Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:15.463705Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:15.463761Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:15.463874Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:15.464178Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:15.464479Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:600:2454] 2026-01-07T17:34:15.466362Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:15.467503Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:15.467770Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:15.467873Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-07T17:34:15.468134Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:15.468206Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:15.468371Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:15.468411Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:15.468448Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:15.468488Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:15.468586Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:15.468804Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:15.468850Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:15.468897Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-07T17:34:15.468960Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:600:2454] 2026-01-07T17:34:15.469011Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:15.469070Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:15.469111Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:15.469148Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:15.469189Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:15.469224Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:15.469260Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:15.469294Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:15.469390Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:15.469547Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:15.470169Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:535:2444] destroyed 2026-01-07T17:34:15.470254Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:534:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::ParallelCreate >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> KqpRboYql::Filter+ColumnStore [GOOD] >> KqpRboYql::Filter-ColumnStore >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> YdbIndexTable::MultiShardTableOneIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:23.451854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:23.451938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2026-01-07T17:33:23.451984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:23.452016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:23.452048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:23.452078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:23.452184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:23.452244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:23.453072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:23.453346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:23.541613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:23.541665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:23.556660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:23.556976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:23.557163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:23.563284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:23.563622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:23.564310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:23.564545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:23.566997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:23.567187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:23.568240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:23.568304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:23.568428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-07T17:33:23.568466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:23.568502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:23.568640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:23.716109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.721989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:23.722530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -07T17:34:17.265831Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.265956Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:17.266048Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:17.266134Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:34:17.268588Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.268683Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:17.268785Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:17.270673Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.270750Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.270873Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:17.270988Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:17.271265Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:17.273006Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:17.273272Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:34:17.273845Z node 37 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:17.274076Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 158913792112 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:17.274193Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:17.274615Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:17.274728Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:17.275158Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:17.275300Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:34:17.277671Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:17.277772Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:17.278194Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:17.278292Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:34:17.279068Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.279176Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:34:17.279443Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:17.279532Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:17.279615Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:17.279693Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:17.279776Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:34:17.279883Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:17.279974Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:34:17.280051Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:34:17.280194Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:17.280277Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:34:17.280365Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:34:17.281192Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:17.281391Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:17.281498Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:34:17.281582Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:34:17.281680Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:17.281840Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:34:17.285215Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:34:17.286082Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:34:17.288017Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [37:676:2665] Bootstrap 2026-01-07T17:34:17.290680Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [37:676:2665] Become StateWork (SchemeCache [37:681:2670]) 2026-01-07T17:34:17.294816Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:17.295580Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: 
TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:17.295815Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-07T17:34:17.296835Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-07T17:34:17.298562Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [37:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:34:17.302667Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:17.303258Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-07T17:34:17.303981Z node 37 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::DropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 6295, MsgBus: 17488 2026-01-07T17:32:42.313869Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673948940421400:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:42.313920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:42.453156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:42.862479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:42.947664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-07T17:32:42.947739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:43.021798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:43.023867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:43.026966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673948940421372:2081] 1767807162297108 != 1767807162297111 2026-01-07T17:32:43.069001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:43.328363Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:43.337515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:43.337536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:43.337541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:43.337634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:44.521494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:44.655877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:44.951928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.221730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:45.313250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.314479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673948940421400:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:47.319764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:48.074739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673974710227031:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.074841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.075242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673974710227041:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.075298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:48.598906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.643368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.676484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.707173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.744174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.796652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.840037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:48.914172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:49.013319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673979005195208:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.013408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.013543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673979005195213:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.017554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673979005195215:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.017809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:49.020170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:49.041765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673979005195216:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:49.103917Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673979005195270:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8787, MsgBus: 25151 2026-01-07T17:32:52.714639Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline ... _base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:02.790930Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:02.937962Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:03.001343Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:03.016902Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:03.017157Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:03.028075Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:03.110723Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:03.110759Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:03.110771Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:03.110920Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:03.517669Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:03.769112Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:04.123406Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.738136Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592674292737091698:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:07.738221Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:08.659372Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674318506896330:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:08.659372Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674318506896320:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:08.659483Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:08.659750Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674318506896335:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:08.659808Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:08.665454Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:08.681194Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674318506896334:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:34:08.755621Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674318506896391:2544] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 17132, MsgBus: 61183 2026-01-07T17:34:10.140769Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7592674325465534828:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:10.141013Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:10.198422Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:10.316612Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:10.316729Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:10.319289Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:10.335651Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:34:10.335685Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:34:10.346042Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:10.405038Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:10.408318Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:10.408345Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:10.408355Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:10.408462Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:11.144396Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:11.262667Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:11.277284Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:34:15.138009Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7592674325465534828:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:15.138108Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:16.093262Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674351235339449:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:16.093448Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:16.097388Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674351235339461:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:16.097513Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674351235339462:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:16.098095Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:16.108521Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:16.163230Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7592674351235339465:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:34:16.270259Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7592674351235339520:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> Secret::DeactivatedQueryService [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> KqpRboYql::ConstantFolding-ColumnStore [GOOD] >> KqpRboYql::BasicJoins >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |85.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:34:06.428355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:06.556659Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:292:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:06.930157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:06.930326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:06.937536Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807243406261 != 1767807243406265 2026-01-07T17:34:06.945259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:07.005852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:07.120581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:07.620302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.621738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:07.621807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:07.621857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:07.622110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:07.636184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:34:19.463731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:887:2766], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.463868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.464432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.472266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:903:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.472855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.482092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:19.518042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:901:2774], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:34:19.579343Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:954:2808] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:19.846664Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:965:2818], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2026-01-07T17:34:19.849367Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjAzYTNkZjAtNTliNjRkNGYtYTQwNzJlM2YtODQ0YjY5ODk=, ActorId: [1:885:2764], ActorState: ExecuteState, LegacyTraceId: 01kecrc4s53f831y3jr6j86ms6, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 50 } message: "Executing CREATE OBJECT SECRET" end_position { row: 1 column: 50 } severity: 1 issues { message: "metadata provider service is disabled" severity: 1 } } } tx_id# trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> Secret::Deactivated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> KqpRboPg::LeftJoinToKqpOpJoin [GOOD] >> KqpRboPg::FiveJoinsCBO >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::Drop >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> TestMalformedRequest::ContentLengthHigher [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-07T17:34:20.778976Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle 
TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:20.782944Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:20.783325Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:20.783431Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.783498Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:20.784327Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:258:2252], now have 1 active actors on pipe 2026-01-07T17:34:20.784402Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:20.807742Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:20.807930Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.808802Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:20.809095Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:20.809611Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:20.810090Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:266:2223] 2026-01-07T17:34:20.812448Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:20.812505Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:20.812551Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:266:2223] 2026-01-07T17:34:20.812604Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:20.812663Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:20.812726Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:20.812761Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:20.812808Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:20.812848Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:20.812902Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:20.812976Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:20.813105Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:20.813354Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:20.813822Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:269:2257], now have 1 active actors on pipe 2026-01-07T17:34:20.881534Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:20.885563Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:20.885959Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-07T17:34:20.886025Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.886101Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-07T17:34:20.886888Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:396:2352], now have 1 active actors on pipe 2026-01-07T17:34:20.887033Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:20.889590Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:20.889715Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.890506Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:20.890646Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:20.891028Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:20.891321Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:404:2323] 2026-01-07T17:34:20.896224Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:20.896297Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-07T17:34:20.896362Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:404:2323] 2026-01-07T17:34:20.896416Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:20.896477Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:20.896523Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:20.896562Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:20.896614Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:20.896653Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:20.896695Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:20.896737Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-07T17:34:20.896846Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:20.897109Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:20.897616Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:407:2357], now have 1 active actors on pipe 2026-01-07T17:34:20.919796Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:20.927605Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:20.928028Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-07T17:34:20.928114Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.928185Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-07T17:34:20.928984Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:456:2393], now have 1 active actors on pipe 2026-01-07T17:34:20.929176Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:20.932019Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:20.932152Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:20.933042Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInfligh ... 3.010491Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-07T17:34:23.010577Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:23.010808Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:34:23.011291Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:472:2402], now have 1 active actors on pipe 2026-01-07T17:34:23.028406Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:23.031593Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:23.031975Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:23.032047Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:23.032108Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:23.032889Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:521:2438], now have 1 active actors on pipe 2026-01-07T17:34:23.033043Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:23.035714Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:23.035840Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:23.036658Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 8 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:23.036805Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:23.037236Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:23.037471Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:529:2409] 2026-01-07T17:34:23.039245Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:23.039299Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:23.039338Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2409] 2026-01-07T17:34:23.039389Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:23.039445Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:23.039486Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:23.039542Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:23.039584Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:23.039619Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:23.039658Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:23.039695Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:23.039783Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:23.040008Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:23.040494Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:532:2443], now have 1 active actors on pipe 2026-01-07T17:34:23.050120Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2026-01-07T17:34:23.050240Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:539:2447], now have 1 active actors on pipe 2026-01-07T17:34:23.050323Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:540:2447], now have 1 active actors on pipe 2026-01-07T17:34:23.061290Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:545:2451], now have 1 active actors on pipe 2026-01-07T17:34:23.120025Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:23.122058Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:23.123211Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:23.123262Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:23.123358Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:23.123639Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:23.123897Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:601:2454] 2026-01-07T17:34:23.125673Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:23.126743Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:23.127043Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:23.127145Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2026-01-07T17:34:23.127360Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:23.127428Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:23.127669Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:23.127712Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:23.127754Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:23.127794Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:23.127893Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:23.128068Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:23.128103Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:23.128140Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:23.128184Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:601:2454] 2026-01-07T17:34:23.128234Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:23.128284Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:23.128328Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:23.128364Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:23.128400Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:23.128458Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:23.128494Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:23.128526Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:23.128607Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:23.129004Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:23.129650Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:539:2447] destroyed 2026-01-07T17:34:23.129718Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:538:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:08.262064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:08.390365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:292:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:08.749469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:08.749636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:08.756535Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807245367019 != 1767807245367023 2026-01-07T17:34:08.764261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:08.830190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:08.938021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:09.443153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:09.444327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:09.444393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:09.444447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:09.444689Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:09.458317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:34:21.811346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:889:2767], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.811528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.811894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:899:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.811989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] Test command err: 2026-01-07T17:32:09.854564Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673805423788609:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:09.854947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:09.878916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:10.053339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:10.095227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:10.095380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:10.154185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:10.213926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:10.216179Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673805423788490:2081] 1767807129813486 != 1767807129813489 2026-01-07T17:32:10.256087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:10.277635Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:10.277659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:10.277665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:10.277735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:10.522525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:10.533895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:10.776466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:10.782856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:32:10.793424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:10.857815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:10.871988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.001429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.055200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.096474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.139628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.176119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.234782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.271900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.303962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:11.336062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.878489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673818308692037:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:12.878625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:12.878968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673818308692049:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:12.879074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673818308692050:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:12.879131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:12.882462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:12.896763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673818308692053:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:32:12.964202Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673818308692104:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:13.392097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.431637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.483613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:13.557550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schem ... 
1.643303Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:21.643345Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 13ms 2026-01-07T17:34:21.643737Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:21.643771Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:34:21.643897Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 13ms 2026-01-07T17:34:21.644403Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:21.730516Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674374858176496:2900], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.730618Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674374858176497:2901], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:21.730680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.731810Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674374858176500:2902], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.731876Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.740769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674374858176494:2899]: Pool not found 2026-01-07T17:34:21.741779Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:34:22.236577Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674379153143818:2909], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.236696Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674379153143819:2910], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:22.236750Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.238255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674379153143822:2911], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.238322Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.251268Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674379153143816:2908]: Pool not found 2026-01-07T17:34:22.251948Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:34:22.258901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674379153143839:2916], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.258984Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674379153143840:2917], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:22.259043Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.259825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674379153143843:2918], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.259860Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:22.264034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674379153143837:2915]: Pool not found 2026-01-07T17:34:22.264666Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:34:22.493106Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:45342) connection closed by inactivity timeout |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] >> KqpRboYql::Filter-ColumnStore [GOOD] >> KqpRboYql::ExpressionSubquery |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] Test command err: 2026-01-07T17:32:11.194279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673814501258789:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:11.212452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:11.564185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:11.564323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:11.565182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:11.573087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:11.709947Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:11.711557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673814501258570:2081] 1767807131151844 != 1767807131151847 2026-01-07T17:32:11.782384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:11.782424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:11.782433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:11.782511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:11.798476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:12.012489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:12.020412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:32:12.192128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:12.252795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:12.258309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:32:12.260557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:12.326618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.469166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.531703Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.581211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.621882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.652189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.682596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.718633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.759658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:12.797204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.609080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673827386162124:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.609097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673827386162113:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.609192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.610307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673827386162128:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.610387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:14.613295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:14.625090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673827386162127:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:32:14.707362Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673827386162180:3103] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:15.145621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.193048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.228891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.259463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:15.287102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
:22.965440Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:22.965477Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 8ms 2026-01-07T17:34:22.965868Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:22.965902Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:34:22.966020Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 8ms 2026-01-07T17:34:22.966516Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:23.032466Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674382691236006:2796], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:23.032755Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236005:2795], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.032815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.033914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236009:2797], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.033987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.043626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674382691236003:2794]: Pool not found 2026-01-07T17:34:23.044426Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:34:23.712263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236028:2803], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.712364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674382691236029:2804], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:23.712413Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.713722Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236032:2805], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.713785Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.719692Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674382691236026:2802]: Pool not found 2026-01-07T17:34:23.719938Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:34:23.723295Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674382691236050:2811], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:23.723376Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236049:2810], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.723470Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.724453Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674382691236053:2812], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.724502Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.728350Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674382691236047:2809]: Pool not found 2026-01-07T17:34:23.728608Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:34:23.825170Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:55490) connection closed by inactivity timeout |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> HttpRequest::Status [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> HttpRequest::Analyze [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TBackupCollectionTests::DropEmptyBackupCollection [GOOD] >> TBackupCollectionTests::DropNonExistentCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-07T17:34:24.247460Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:24.251022Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:24.251409Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:24.251488Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.251549Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:24.252354Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2253], now have 1 active actors on pipe 2026-01-07T17:34:24.252424Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:24.281989Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:24.282192Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.283592Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:24.283810Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:24.284332Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:24.284821Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2224] 2026-01-07T17:34:24.287574Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:24.287642Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:24.287693Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2224] 2026-01-07T17:34:24.287753Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:24.287829Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:24.287903Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:24.287977Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:24.288050Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:24.288094Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:24.288138Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:24.288179Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:24.288305Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:24.289158Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:24.291309Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2258], now have 1 active actors on pipe 2026-01-07T17:34:24.357225Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:24.360959Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:24.361343Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-07T17:34:24.361429Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.361495Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-07T17:34:24.362268Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:400:2355], now have 1 active actors on pipe 2026-01-07T17:34:24.362398Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:24.364999Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:24.365148Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.366014Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:24.366151Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-07T17:34:24.366510Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:24.366802Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:408:2326] 2026-01-07T17:34:24.368847Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:24.368913Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-07T17:34:24.369015Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:408:2326] 2026-01-07T17:34:24.369075Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:24.369145Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:24.369192Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:24.369230Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:24.369270Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:24.369309Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:24.369353Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:24.369396Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-07T17:34:24.369504Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:24.369763Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:24.370303Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:411:2360], now have 1 active actors on pipe 2026-01-07T17:34:24.391545Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:24.395789Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:24.396180Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-07T17:34:24.396243Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.396308Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-07T17:34:24.397132Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:460:2396], now have 1 active actors on pipe 2026-01-07T17:34:24.397258Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:24.399859Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:24.399988Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:24.400802Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... 37928138][Partition][1][StateIdle] Try persist 2026-01-07T17:34:26.329610Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:26.329913Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:34:26.330481Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:472:2402], now have 1 active actors on pipe 2026-01-07T17:34:26.374725Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:26.378663Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:26.379067Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:26.379149Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.379217Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:26.380129Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:521:2438], now have 1 active actors on pipe 2026-01-07T17:34:26.380258Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:26.382971Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:26.383109Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.383758Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:26.383913Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:26.384297Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:26.384517Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:529:2409] 2026-01-07T17:34:26.389528Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:26.389625Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:26.389681Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2409] 2026-01-07T17:34:26.389756Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:26.389841Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:26.389891Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:26.389936Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:26.389984Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.390027Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:26.390075Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.390118Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:26.390235Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:26.390503Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:26.391065Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:532:2443], now have 1 active actors on pipe 2026-01-07T17:34:26.401070Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2026-01-07T17:34:26.401467Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:539:2447], now have 1 active actors on pipe 2026-01-07T17:34:26.401600Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:540:2447], now have 1 active actors on pipe 2026-01-07T17:34:26.414727Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:548:2454], now have 1 active actors on pipe 2026-01-07T17:34:26.490172Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:26.493446Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:26.494568Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.494648Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:26.494811Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:26.495198Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:26.495448Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:604:2457] 2026-01-07T17:34:26.505944Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:26.507628Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:26.508046Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:26.508207Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2026-01-07T17:34:26.508511Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:26.508604Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:26.508870Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:26.509942Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:26.510041Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:26.510099Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:26.510237Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:26.510538Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:26.510599Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:26.510650Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:26.510704Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:604:2457] 2026-01-07T17:34:26.510764Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:26.510853Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:26.510904Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:26.510964Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:26.511006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.511048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:26.511092Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.511136Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:26.511239Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:26.511439Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:26.512104Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:539:2447] destroyed 2026-01-07T17:34:26.512187Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:538:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] Test command err: 2026-01-07T17:32:12.833612Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673820231240605:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:12.833674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:13.152786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:13.152887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:13.175058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:13.191758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-07T17:32:13.267348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:13.284543Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673820231240573:2081] 1767807132831584 != 1767807132831587 2026-01-07T17:32:13.364729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:13.367511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:13.367530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:13.367537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:13.367631Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:13.576228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:13.787398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:32:13.800699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:32:13.803088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:32:13.892150Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:13.903913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.041985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.093270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 
2026-01-07T17:32:14.100348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.163482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.201229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.241220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.297887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.342767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.371025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:14.402309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.132679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837411111410:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.132819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.133177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837411111422:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.133248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.133262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673837411111423:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:16.138033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:16.158328Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673837411111426:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:32:16.250483Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673837411111477:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:16.684309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.718719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.752099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.797004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:16.838235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
:24.867878Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:24.867920Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 6ms 2026-01-07T17:34:24.868324Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:24.868357Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:34:24.868482Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 6ms 2026-01-07T17:34:24.869048Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:34:25.004721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831774:2784], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.004818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674392039831775:2785], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:25.004866Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.005404Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831778:2786], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.005802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.018705Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674392039831772:2783]: Pool not found 2026-01-07T17:34:25.018889Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:34:25.584752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831798:2792], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.584854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674392039831799:2793], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:25.584906Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.585578Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831802:2794], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.586358Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.592254Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674392039831796:2791]: Pool not found 2026-01-07T17:34:25.592969Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:34:25.605195Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831819:2799], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.605337Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592674392039831820:2800], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:34:25.605405Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.609413Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674392039831823:2801], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.609902Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.614281Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592674392039831817:2798]: Pool not found 2026-01-07T17:34:25.614924Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:34:25.705086Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:57670) connection closed by inactivity timeout |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> DataShardVolatile::VolatileTxAbortedOnSplit >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-07T17:34:26.369855Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:26.374619Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:26.375028Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:26.375107Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.375168Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:26.375985Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2026-01-07T17:34:26.376117Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:26.419184Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:26.419412Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.420378Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:26.420576Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:26.425283Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:26.425756Z node 2 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2026-01-07T17:34:26.428123Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:26.428190Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:26.428249Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2026-01-07T17:34:26.428300Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:26.428358Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:26.428426Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:26.428460Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:26.428531Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.428570Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:26.428603Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.428640Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:26.428768Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:26.433411Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:26.433991Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2026-01-07T17:34:26.557325Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:26.573392Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:26.573821Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:26.573889Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.573950Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:26.574729Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2026-01-07T17:34:26.574884Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:26.581369Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:26.581537Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:26.582533Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:26.582709Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:26.583193Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:26.583460Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:409:2327] 2026-01-07T17:34:26.585527Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:26.585590Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:26.585633Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:409:2327] 2026-01-07T17:34:26.585687Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:26.585742Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:26.585790Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:26.585828Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:26.585863Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.585900Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:26.585944Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:26.585976Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:26.586088Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:26.586342Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:26.586905Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2026-01-07T17:34:26.588372Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-07T17:34:26.588725Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:420:2365], now have 1 active actors on pipe 2026-01-07T17:34:26.589099Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:418:2364] destroyed 2026-01-07T17:34:26.589604Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:420:2365] destroyed 2026-01-07T17:34:27.311605Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:27.314910Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:27.315225Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:27.315282Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:27.315338Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:27.316067Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:256:2250], now have 1 active actors on pipe 2026-01-07T17:34:27.316126Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:27.318124Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:27.31 ... -07T17:34:27.421867Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:34:27.422144Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:34:27.422748Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:468:2399], now have 1 active actors on pipe 2026-01-07T17:34:27.440655Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:27.444462Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:27.444779Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:27.444837Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:27.444895Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:27.445644Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:517:2435], now have 1 active actors on pipe 2026-01-07T17:34:27.445773Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:27.448037Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:27.448161Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:27.449025Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 6 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:27.449178Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:27.449518Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:27.449743Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:525:2406] 2026-01-07T17:34:27.452086Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:27.452162Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-07T17:34:27.452210Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:525:2406] 2026-01-07T17:34:27.452268Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:27.452339Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:27.452388Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:27.452425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:27.452461Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:27.452503Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:27.452545Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:27.452582Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:27.452679Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:27.452913Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:27.454024Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:528:2440], now have 1 active actors on pipe 2026-01-07T17:34:27.455461Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:534:2443], now have 1 active actors on pipe 2026-01-07T17:34:27.455550Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2026-01-07T17:34:27.455649Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2026-01-07T17:34:27.469841Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:541:2448], now have 1 active actors on pipe 2026-01-07T17:34:27.514351Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:27.516689Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:27.517909Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:27.517973Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:27.518102Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:27.518436Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:27.518692Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:597:2451] 2026-01-07T17:34:27.525336Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:27.526951Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:27.527244Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:27.527360Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-07T17:34:27.527705Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:27.527793Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:27.528004Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:27.528047Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:27.528086Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:27.528126Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:27.528223Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:27.528406Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:27.528452Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:27.528491Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-07T17:34:27.528577Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:597:2451] 2026-01-07T17:34:27.528635Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:27.528695Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:27.528740Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:27.528774Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:27.528814Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:27.528853Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:27.528894Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:27.528953Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:27.529047Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:27.529271Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:27.529943Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:535:2444] destroyed 2026-01-07T17:34:27.530004Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:534:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName |85.6%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs |85.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.700542Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:10.800809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:10.809584Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:10.810055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:10.810215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.230730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.351374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.351506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.396207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.489838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.228887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.229842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.229898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.229928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.230450Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.302076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.003331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:16.507434Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.516365Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:16.519185Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:16.570583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:16.570828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.621125Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:16.627899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.719345Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.016609s 2026-01-07T17:34:16.922966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:16.923107Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.924805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.926010Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.926705Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.927907Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.928056Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.929040Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.929332Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.929672Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.929856Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.950816Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.261349Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:17.292477Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:17.292574Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:17.335396Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:17.336956Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:17.337196Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:17.337263Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:17.337319Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:17.337379Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:17.337437Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:17.337493Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:17.338338Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:17.339697Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1911:2455] 2026-01-07T17:34:17.343880Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:34:17.352320Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Describe result: PathErrorUnknown 2026-01-07T17:34:17.352394Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Creating table 2026-01-07T17:34:17.352501Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:34:17.354800Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.354935Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2142:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.388065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2215:2649] 2026-01-07T17:34:17.388745Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2215:2649], schemeshard id = 72075186224037897 2026-01-07T17:34:17.413033Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2660], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:17.447298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.467564Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:17.467722Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:17.487922Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:17.572870Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:34:17.620825Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:18.297058Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:18.433864Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:18.434036Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2114:2581] Owner: [2:2113:2580]. Column diff is empty, finishing 2026-01-07T17:34:19.242934Z node 2 ... ;this=136526287570688;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3006;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=131:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.968845Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;self_id=[2:2885:3067];ev=NActors::IEventHandle;tablet_id=72075186224037902;tx_id=281474976715659;this=136526287574048;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3007;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.994699Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2886:3068];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=136526287575168;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3007;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.033619Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2903:3079];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=136526287579648;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.034314Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2906:3081];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=136526287580768;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.034770Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037905;self_id=[2:2908:3082];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=136526287581888;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.068870Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2922:3091];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=136526237189280;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.072059Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;self_id=[2:2923:3092];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=136526237187488;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.072683Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2925:3093];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=136526237186368;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1907:2453];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.243493Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.243629Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.243684Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.363804Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.363901Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.363940Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.410120Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.410224Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.410286Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.436270Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.436384Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.436420Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.494484Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.494584Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.494624Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.516082Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.516177Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.516214Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.553850Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.553958Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.554039Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.575588Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.575707Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.575749Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.610622Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.610741Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.610781Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.632132Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.632226Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.632261Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000016s 2026-01-07T17:34:24.536579Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3760:3725] 2026-01-07T17:34:24.590023Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.463700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:10.577609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:10.588630Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:10.589088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:10.589244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.010456Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.109383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.109527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.146468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.250029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.036570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.040771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.040838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.040874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.041427Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.117240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:12.697643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:16.217635Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.226267Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:16.230183Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:16.313997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:16.314146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.371050Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:16.377409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.626946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:16.627061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.628292Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.629021Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.629561Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.630348Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.630815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.630993Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.631191Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.631340Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.631495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.649447Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.911047Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:16.973838Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:16.973960Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:17.020327Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:17.021992Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:17.022250Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:17.022312Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:17.022372Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:17.022428Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:17.022488Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:17.022543Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:17.023444Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:17.038164Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.038307Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.076478Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:34:17.077476Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:34:17.168946Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:34:17.171178Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:34:17.192863Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:34:17.192920Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:34:17.193030Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:34:17.201161Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:17.205357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.213097Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:17.213242Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:17.228735Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:17.415028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:34:17.503499Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:18.014964Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:18.187789Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:18.187898Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:34:19.124793Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... =0;src=[2:1904:2451];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.849604Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2902:3073];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=136986913315488;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.904576Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2928:3089];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=136986913309664;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.905568Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2919:3082];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=136986913308544;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.906064Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2926:3087];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=136986913306976;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.943973Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2930:3091];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=136986913301376;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3011;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.944629Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2931:3092];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=136986913300256;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3011;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:20.976763Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2937:3094];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=136986913297344;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3012;max=18446744073709551615;plan=0;src=[2:1904:2451];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.209531Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.209675Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.209743Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.286858Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.286956Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.287004Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.316609Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.316714Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.316757Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.341811Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.341935Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.341975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.358213Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.358329Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.358372Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.382153Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.382263Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.382315Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.403871Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.403996Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.404057Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.423791Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.423896Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.423937Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.444222Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.444321Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.444360Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.465407Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.465524Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:34:21.465576Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000019s 2026-01-07T17:34:24.344444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3723:3677] 2026-01-07T17:34:24.371171Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3693:3511] , Record { OperationId: "\000\000\000\000\017\366as\252\033}?\372\36548" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Database: "" } 2026-01-07T17:34:24.371271Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `as}?48', DatabaseName: `', Types: 2026-01-07T17:34:24.371329Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `as}?48', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000003zpc5stm6vx7zxfad1r' |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TBackupCollectionTests::DropNonExistentCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithMultipleBackups ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::BrokenCompressedData [GOOD] Test command err: 2026-01-07T17:32:30.724131Z :ReadSession INFO: Random seed for debugging is 1767807150724095 2026-01-07T17:32:31.441651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673901282121395:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:31.441702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:31.522558Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:31.560401Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673901043532326:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:31.575816Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:31.594790Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:31.940221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:31.953721Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:32.144784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:32.301075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:32.301162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:32.330413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:32.330489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:32.394295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:32.438771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:32.461953Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:32.462892Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:32:32.465054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:32.473121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:32.576724Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:32.778186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:32.858818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038ad/r3tmp/yandexXwf00d.tmp 2026-01-07T17:32:32.858847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038ad/r3tmp/yandexXwf00d.tmp 2026-01-07T17:32:32.859039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038ad/r3tmp/yandexXwf00d.tmp 2026-01-07T17:32:32.859146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:33.085167Z INFO: TTestServer started on Port 26007 GrpcPort 5163 PQClient connected to localhost:5163 2026-01-07T17:32:33.560525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:33.633724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 2026-01-07T17:32:36.445067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673901282121395:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:36.445165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:36.554655Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673901043532326:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:36.554711Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:36.637366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673922756959211:2332], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.637602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.638141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673922756959223:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.638187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673922756959224:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.642066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:36.646307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:36.704116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673922756959228:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-07T17:32:37.177124Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673922756959311:2986] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:37.216158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:37.231002Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592673922518369129:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:32:37.230600Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592673927051926623:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:32:37.231791Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=M2UyYWQ1OWYtNjJkZjQxNGYtZjI4MTc3YWQtYzZhOGIwMWM=, ActorId: [2:7592673922518369112:2303], ActorState: ExecuteState, LegacyTraceId: 01kecr90hsf31vkcgx9zckxm86, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:32:37.233941Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:32:37.245219Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTFiYmY5NDctZTg4NzllZmMtZTU3YTFlYmYtNzdlN2Y5Y2U=, ActorId: [1:7592673922756959194:2331], ActorState: ExecuteState, LegacyTraceId: 01kecr90cj89sfsb ... tno 0 count 1 size 211 endOffset 3 max time lag 0ms effective offset 2 2026-01-07T17:34:25.210938Z node 14 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037892][Partition][0][StateIdle] read cookie 7 added 1 blobs, size 176 count 1 last offset 2, current partition end offset: 3 2026-01-07T17:34:25.210966Z node 14 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037892][Partition][0][StateIdle] Reading cookie 7. Send blob request. 2026-01-07T17:34:25.211042Z node 14 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 176 accessed 1 times before, last time 2026-01-07T17:34:24.000000Z 2026-01-07T17:34:25.211089Z node 14 :PERSQUEUE DEBUG: read.h:126: [72075186224037892][PQCacheProxy]Reading cookie 7. All 1 blobs are from cache. 2026-01-07T17:34:25.211127Z node 14 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:34:25.211275Z node 14 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 156 from pos 0 cbcount 1 2026-01-07T17:34:25.211394Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2026-01-07T17:34:25.213083Z node 14 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:34:25.213817Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1767807264046 CreateTimestampMS: 1767807264045 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 1 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 2 } 2026-01-07T17:34:25.214011Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset 3 2026-01-07T17:34:25.214063Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid c950b95c-544882ba-311829e7-6360675 has messages 1 2026-01-07T17:34:25.214230Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 read done: guid# c950b95c-544882ba-311829e7-6360675, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2026-01-07T17:34:25.214269Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 response to read: guid# c950b95c-544882ba-311829e7-6360675 2026-01-07T17:34:25.214575Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 Process answer. Aval parts: 0 2026-01-07T17:34:25.217919Z :DEBUG: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:34:25.218147Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2026-01-07T17:34:25.218309Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2026-01-07T17:34:25.218381Z :DEBUG: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2026-01-07T17:34:24.045000Z WriteTime: 2026-01-07T17:34:24.046000Z Ip: "ipv6:[::1]:36298" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:36298" } } } } 2026-01-07T17:34:25.218595Z :INFO: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] Closing read session. 
Close timeout: 3.000000s 2026-01-07T17:34:25.218651Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-07T17:34:25.218711Z :INFO: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1593 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:34:25.219301Z :INFO: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] Closing read session. Close timeout: 0.000000s 2026-01-07T17:34:25.219357Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-07T17:34:25.219414Z :INFO: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1594 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:34:25.219548Z :NOTICE: [/Root] [/Root] [613a7883-205f382c-99e6de4f-8ece99ab] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:34:25.221130Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 grpc read done: success# 1, data# { read { } } 2026-01-07T17:34:25.221202Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 grpc closed 2026-01-07T17:34:25.221276Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_13_1_13608923360469360042_v1 is DEAD 2026-01-07T17:34:25.222421Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [13:7592674382685533175:2489] disconnected. 2026-01-07T17:34:25.222454Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [13:7592674382685533175:2489] disconnected; active server actors: 1 2026-01-07T17:34:25.222478Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [13:7592674382685533175:2489] client user disconnected session shared/user_13_1_13608923360469360042_v1 2026-01-07T17:34:25.223977Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_13_1_13608923360469360042_v1 2026-01-07T17:34:25.224035Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [13:7592674382685533178:2492] destroyed 2026-01-07T17:34:25.224135Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_13_1_13608923360469360042_v1 2026-01-07T17:34:25.297170Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:25.297205Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:25.297223Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:25.297247Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:25.297262Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:34:25.401036Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:25.401071Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:25.401090Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:25.401124Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:25.401140Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:34:28.094620Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:34:28.094704Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, 
ReadSizeServerDelta = 0 2026-01-07T17:34:28.094777Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:34:28.105252Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:34:28.105919Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:34:28.106230Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:34:28.108026Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:34:28.109601Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:34:28.110459Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-07T17:34:28.110556Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-07T17:34:28.110765Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:34:28.110841Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:34:28.110878Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:34:28.110946Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } |85.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |85.6%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> KqpRboPg::TestCrossInnerJoin+ColumnStore [GOOD] >> KqpRboPg::PredicatePushdownLeftJoin |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TSchemeShardExtSubDomainTest::CreateAndWait >> KqpRboPg::FiveJoinsCBO [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TSchemeShardExtSubDomainTest::Create >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2026-01-07T17:34:28.922599Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:28.926957Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:28.927370Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-07T17:34:28.927445Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:28.927528Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-07T17:34:28.928453Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:258:2252], now have 1 active actors on pipe 2026-01-07T17:34:28.928573Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:28.954585Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:28.954836Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:28.956032Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:28.956432Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:28.956976Z node 1 
:PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:28.957429Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:266:2223] 2026-01-07T17:34:28.960423Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:28.960510Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-07T17:34:28.960562Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:266:2223] 2026-01-07T17:34:28.960640Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:28.960718Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:28.960793Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:34:28.960841Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:28.960892Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:28.964424Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:28.964548Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:28.964593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-07T17:34:28.964755Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:28.965104Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:28.965819Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:269:2257], now have 1 active actors on pipe 2026-01-07T17:34:29.023918Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:29.027337Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:29.027669Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-07T17:34:29.027820Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:29.027875Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-07T17:34:29.028598Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:396:2352], now have 1 active actors on pipe 2026-01-07T17:34:29.028696Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:29.031160Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:29.031307Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:29.032168Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-07T17:34:29.032318Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:29.032599Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:29.032842Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:404:2323] 2026-01-07T17:34:29.034934Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:29.035001Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-07T17:34:29.035169Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:404:2323] 2026-01-07T17:34:29.035216Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:29.035271Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:29.035314Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:29.035347Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:34:29.035380Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:29.035413Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:29.035462Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:29.035500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-07T17:34:29.035579Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:29.035789Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:34:29.036253Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:407:2357], now have 1 active actors on pipe 2026-01-07T17:34:29.053206Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:29.056560Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:29.056975Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-07T17:34:29.057058Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:29.057126Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-07T17:34:29.057834Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [1:456:2393], now have 1 active actors on pipe 2026-01-07T17:34:29.057951Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:29.060365Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:29.060479Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:29.061575Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-07T17:34:29.061772Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializ ... 
0000, To tx_18446744073709551615 2026-01-07T17:34:31.114528Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-07T17:34:31.114601Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:31.114670Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:31.115400Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:516:2434], now have 1 active actors on pipe 2026-01-07T17:34:31.115495Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:34:31.117487Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:31.117641Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:31.118196Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-07T17:34:31.118321Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:31.118601Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:31.118812Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:524:2405] 2026-01-07T17:34:31.120879Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:31.120964Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-07T17:34:31.121010Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:524:2405] 2026-01-07T17:34:31.121067Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:31.121123Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:31.121166Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-07T17:34:31.121204Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:31.121245Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:31.121278Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:31.121319Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:31.121354Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:31.121439Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:31.121624Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:31.122082Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:527:2439], now have 1 active actors on pipe 2026-01-07T17:34:31.123500Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:534:2442], now have 1 active actors on pipe 2026-01-07T17:34:31.124060Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:536:2443], now have 1 active actors on pipe 2026-01-07T17:34:31.124148Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:537:2443], now have 1 active actors on pipe 2026-01-07T17:34:31.124264Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:538:2443], now have 1 active actors on pipe 2026-01-07T17:34:31.124804Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:551:2454], now have 1 active actors on pipe 2026-01-07T17:34:31.150640Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:34:31.152997Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:34:31.153833Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:34:31.153890Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-07T17:34:31.154003Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:34:31.154328Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:34:31.154540Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:607:2457] 2026-01-07T17:34:31.156500Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:34:31.157663Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:34:31.157982Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:34:31.158104Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-07T17:34:31.158362Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:34:31.158434Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-07T17:34:31.158599Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-07T17:34:31.158647Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:34:31.158685Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:34:31.158724Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:34:31.158839Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-07T17:34:31.158992Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:34:31.159033Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:34:31.159075Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-07T17:34:31.159120Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:607:2457] 2026-01-07T17:34:31.159176Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:34:31.159232Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:34:31.159271Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-07T17:34:31.159305Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:34:31.159347Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:31.159384Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:34:31.159421Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:34:31.159457Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-07T17:34:31.159535Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:34:31.159683Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T17:34:31.160386Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:534:2442] destroyed 2026-01-07T17:34:31.160436Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928137] server disconnected, pipe [3:536:2443] destroyed 2026-01-07T17:34:31.160522Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:537:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } } } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc |85.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |85.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.847749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:10.958443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:10.968617Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:10.969108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:10.969240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.424310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.547723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.547861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.588415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.664579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.368465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.369787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.369838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.369872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.370082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.435567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.117400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:17.171348Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:17.179240Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:17.183428Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:17.263564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:17.263835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:17.315175Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:17.319316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.575246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:17.575364Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:17.577120Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.577857Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.578441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.579444Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.579584Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.579940Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.580113Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.580250Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.580431Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.607573Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.931042Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:17.991254Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:17.991355Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:18.115987Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:18.121778Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:18.122147Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:18.122227Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:18.122290Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:18.122368Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:18.122433Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:18.122502Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:18.123387Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:18.129621Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:34:18.133765Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:34:18.158508Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:34:18.158588Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:34:18.158679Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:34:18.177258Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:18.177452Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:18.214011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:34:18.214488Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:34:18.218401Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:18.279800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:18.308708Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:18.308887Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:18.356432Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:18.570213Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:18.629116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:34:19.103261Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:19.213745Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:19.213845Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:34:20.093520Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... rc=[2:2886:3016];cookie=101:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.740415Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;self_id=[2:3569:3444];ev=NActors::IEventHandle;tablet_id=72075186224037912;tx_id=281474976715661;this=137134024516768;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=141:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.741071Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[2:3579:3449];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976715661;this=137134024515200;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=131:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.741658Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[2:3581:3450];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976715661;this=137134024514080;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=121:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.742137Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[2:3587:3452];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976715661;this=137134024512960;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=111:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.785654Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037907;self_id=[2:3596:3460];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715661;this=137134024498400;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=91:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.786300Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;self_id=[2:3597:3461];ev=NActors::IEventHandle;tablet_id=72075186224037913;tx_id=281474976715661;this=137134024505792;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=151:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.786841Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;self_id=[2:3598:3462];ev=NActors::IEventHandle;tablet_id=72075186224037914;tx_id=281474976715661;this=137134024505120;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2886:3016];cookie=161:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.966656Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.966804Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.966858Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.999558Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.999662Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:24.999701Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.009369Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.009470Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.009508Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.017548Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.017653Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.017690Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.025693Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.025798Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.025839Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.033330Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.033426Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.033462Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.042871Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.042974Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.043013Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.052760Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.052872Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.052911Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.061776Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.061888Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.061926Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.069736Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.069835Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-07T17:34:25.069873Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; waiting actualization: 0/0.000034s 2026-01-07T17:34:27.838427Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4384:4039] 2026-01-07T17:34:27.875150Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4356:3699] , Record { OperationId: "\000\000\000\000\025\321\025\276]\010?\031b&\214?" Tables { PathId { OwnerId: 72057594046644480 LocalId: 38 } } Database: "" } 2026-01-07T17:34:27.875268Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `]?b&?', DatabaseName: `', Types: 2026-01-07T17:34:27.875318Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `]?b&?', PathId: [OwnerId: 72057594046644480, LocalPathId: 38], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000005eh2pz5t21z35h2d31z' |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FiveJoinsCBO [GOOD] Test command err: Trying to start YDB, gRPC: 14497, MsgBus: 5767 2026-01-07T17:34:06.052724Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674309305401972:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:06.052796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:06.091955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:06.472110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:06.472248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:06.482108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:06.484650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:06.565585Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:34:06.566926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674309305401940:2081] 1767807246050689 != 1767807246050692 2026-01-07T17:34:06.655625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:06.655650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:06.655655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:06.655744Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:06.708101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:07.065302Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:07.125657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.136559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:34:09.104890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322190304713:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.105055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.105410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322190304723:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.105445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.354805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:09.490037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322190304824:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.490109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.490373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322190304829:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.490440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322190304830:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.490476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.494015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:09.503957Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674322190304833:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:34:09.630285Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674322190304884:2598] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24678, MsgBus: 3820 2026-01-07T17:34:10.900157Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592674328502481111:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:10.900199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:10.943263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:11.056773Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.057006Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592674328502480989:2081] 1767807250879559 != 1767807250879562 2026-01-07T17:34:11.100460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.100781Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.110758Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.169301Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:11.169331Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:11.169345Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:11.169440Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:11.202832Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:11.657447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:11.664729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:34:11.925602Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:14.378268Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674345682351057:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.378339Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.378637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674345682351067:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.378680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.407664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... ce] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.863340Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674368964850952:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.863387Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674368964850951:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.863425Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.868646Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:20.901172Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592674368964850955:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:34:20.972245Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592674368964851006:2764] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:21.221196Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592674351784980638:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:21.221287Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8138, MsgBus: 27868 2026-01-07T17:34:23.513944Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592674382279509462:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:23.514090Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:23.573002Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:23.682869Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:23.684965Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592674382279509354:2081] 1767807263491053 != 1767807263491056 2026-01-07T17:34:23.701238Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:23.701329Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:23.727833Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:23.737909Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:23.889530Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:23.889556Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:23.889566Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:23.889648Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:24.397021Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:34:24.525124Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:28.517048Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592674382279509462:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:28.517142Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:29.635615Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674408049314026:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:29.635708Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:29.636191Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674408049314036:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:29.636250Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:29.683798Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:29.771558Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:29.831630Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:29.890428Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:29.945878Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:30.000652Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:30.157250Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674412344281781:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:30.157380Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:30.157739Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674412344281786:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:30.157774Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674412344281787:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:30.157810Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:30.162673Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:30.191258Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592674412344281790:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-07T17:34:30.268466Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592674412344281842:2835] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TBackupCollectionTests::DropCollectionWithMultipleBackups [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |85.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup >> TSchemeShardViewTest::EmptyName >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncDropSameView [GOOD] |85.7%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::DropView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:33.674892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:33.675030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.675089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:33.675130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:33.675165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:33.675192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:33.675252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.675334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:33.676269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:33.676578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:33.762151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:33.762216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:33.778591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:33.779039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2026-01-07T17:34:33.779242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:33.798181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:33.798576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:33.799305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:33.799592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:33.802823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.803069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:33.804299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:33.804366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.804489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:33.804544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:33.804586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:33.804773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:33.936502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.937986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.938661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:34.757643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:34:34.760234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:34.760303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:34.760501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:34.760659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:34.760705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:34:34.760769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:34:34.761270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:34.761329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:34:34.761475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:34.761511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:34.761568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:34.761606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:34.761642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:34:34.761684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:34.761720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:34:34.761755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:34:34.761845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:34:34.761886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:34:34.761935Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:34:34.761974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:34:34.762685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:34.762814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:34.762856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:34.762911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:34:34.762955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:34.763839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:34.763915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:34.763946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:34.763974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:34:34.764003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:34:34.764081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:34:34.764438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:34.764487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:34:34.764570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:34.768832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:34.769101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:34.769564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2026-01-07T17:34:34.769916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:34:34.769969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:34:34.770060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:34.770099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-07T17:34:34.770164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:34:34.770185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:34:34.770788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:34:34.770921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:34.770961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:736:2725] 2026-01-07T17:34:34.771105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:34.771241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:34:34.771285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:34.771308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:736:2725] 2026-01-07T17:34:34.771453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:34:34.771481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:736:2725] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 
2026-01-07T17:34:34.772107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:34.772298Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 215us result status StatusPathDoesNotExist 2026-01-07T17:34:34.772477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> TBackupCollectionTests::DropCollectionWithFullBackup [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup >> TBackupCollectionTests::DropCollectionWithNestedTables [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:34.649703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:34.649790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:34.649850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:34.649899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:34.649934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:34.649968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:34.650030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:34.650091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:34.650966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:34.651268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:34.790803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:34.790866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:34.815599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:34.815979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:34.816213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:34.824456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:34.824827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:34.825722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:34.826035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:34.829149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:34.829353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:34.830465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:34.830524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:34.830640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:34.830689Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:34.830747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:34.830945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:34.986221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.987978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.988042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.988136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.988202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.988312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:34.993098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:34:35.868086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:35.868303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2026-01-07T17:34:35.868377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2026-01-07T17:34:35.868513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:34:35.868583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-07T17:34:35.868622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-07T17:34:35.868674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:34:35.870142Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:34:35.879851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-07T17:34:35.880079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2026-01-07T17:34:35.880423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:35.880482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2026-01-07T17:34:35.880535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-07T17:34:35.880678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { 
AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:35.881176Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-07T17:34:35.882547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-07T17:34:35.882688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-07T17:34:35.883030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:35.883161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:35.883218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-07T17:34:35.883352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-07T17:34:35.883512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:35.883593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:34:35.885716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:35.885752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:35.885880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:35.885950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:35.886005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:34:35.886035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: 
[1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:34:35.886283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:35.886333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:34:35.886449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:35.886493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:35.886530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:35.886565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:35.886597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:34:35.886646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:35.886683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:34:35.886732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:34:35.886798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:34:35.886835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:34:35.886875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:34:35.886903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:34:35.887569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:35.887680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:35.887715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:35.887749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:34:35.887785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2026-01-07T17:34:35.888408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:35.888475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:35.888503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:35.888539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:34:35.888572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:34:35.888631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:34:35.891325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:34:35.892118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:35.033042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:35.033139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:35.033199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:35.033250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:35.033283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:35.033313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:35.033401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:35.033473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:35.034370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:35.037262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:35.139103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:35.139172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:35.172882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:35.173917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:35.174136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:35.213514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:35.213972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:35.214696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:35.214993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:35.223048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:35.223256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:35.224513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:35.224583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:35.224705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:35.224755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:35.224797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:34:35.225009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:35.400544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.401582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.401726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.401815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.401899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.401967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:35.402684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... irst GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:36.120550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.120707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:34:36.120911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.121007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:36.121044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:36.121077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:34:36.123746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.123813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:36.123854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:36.125888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.125956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.125997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:36.126040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:36.126168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:36.133641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:36.133820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:34:36.134171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.134337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:36.134385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:36.134670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:36.134745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:36.134898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:36.134965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:36.138915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:36.138970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:36.139306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.139350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:34:36.139740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.139792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:34:36.139885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:36.139919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:36.139954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:34:36.139981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:36.140026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2026-01-07T17:34:36.140085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:34:36.140120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:34:36.140146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:34:36.140277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:36.140314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:34:36.140347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:34:36.140886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:36.140992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:34:36.141029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:34:36.141088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:34:36.141129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:36.141227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:34:36.151687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:34:36.152189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:34:36.152758Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:34:36.153801Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:34:36.156600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:36.156781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2026-01-07T17:34:36.156852Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2026-01-07T17:34:36.156960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-07T17:34:36.158139Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:34:36.161025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:36.161255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2026-01-07T17:34:36.161762Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:35.685029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:35.685128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:35.685200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:35.685245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:35.685278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:35.685305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:35.685372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:35.685429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:35.686269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:35.686549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:35.806501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:35.806558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:35.846015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:35.846364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:35.846535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:35.854742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:35.855101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:35.855769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:35.856040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:35.861002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:35.861208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:35.862344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:35.862411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:35.862531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:35.862580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:35.862629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:35.862819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-07T17:34:36.010727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.011972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.012261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-07T17:34:36.663558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-07T17:34:36.663691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-07T17:34:36.664032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.664164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:36.664224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-07T17:34:36.664361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-07T17:34:36.664575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:36.664655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:34:36.666744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:36.666791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:36.666964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:36.667103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.667164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:34:36.667212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:34:36.667532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2026-01-07T17:34:36.667583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:34:36.667689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:36.667722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:36.667758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:36.667787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:36.667851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:34:36.667899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:36.667937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:34:36.667977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:34:36.668056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:34:36.668093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:34:36.668136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:34:36.668166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:34:36.668974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:36.669075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:36.669157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:36.669195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:34:36.669232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:36.670153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:36.670242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:36.670292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:36.670329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:34:36.670358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:34:36.670424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:34:36.673792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:34:36.674653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:34:36.674896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:34:36.674943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:34:36.675351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:34:36.675445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:34:36.675481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:706:2695] TestWaitNotification: OK eventTxId 101 2026-01-07T17:34:36.675894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:36.676091Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 234us result status StatusSuccess 2026-01-07T17:34:36.676462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> TSchemeShardViewTest::DropView [GOOD] >> KqpRboPg::PredicatePushdownLeftJoin [GOOD] >> KqpRboPg::TPCH >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:36.302553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:36.302715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:36.302771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:36.302818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:36.302857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:36.302885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:36.302952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:34:36.303015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:36.303866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:36.304152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:36.394782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:36.394845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:36.416671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:36.417064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:36.417255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:36.432695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:36.433135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:36.433798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.434052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:36.438965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.439171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:36.440222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:36.440279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.440386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:36.440427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:36.440468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:36.440616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.597727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.598747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.598889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.598975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.599925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:34:37.303114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:37.303160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:37.303339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:37.303445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:37.303491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:34:37.303544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:34:37.303618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:34:37.303662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:34:37.303785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:37.303828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:37.303877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:34:37.303907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:37.303939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:34:37.303988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:34:37.304024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:34:37.304054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:34:37.304135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:34:37.304171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:34:37.304239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 7 2026-01-07T17:34:37.304279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:34:37.305885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:37.307755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:37.307819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:37.307868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:34:37.307913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:37.308831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:37.309062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:34:37.309097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:34:37.309125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:34:37.309173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:34:37.309254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:34:37.311614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:34:37.313501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-07T17:34:37.313828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:34:37.313872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:34:37.313961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:34:37.313986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:34:37.314029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:37.314051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:34:37.314655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:34:37.314791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:34:37.314902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.314952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:712:2701] 2026-01-07T17:34:37.315053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.315073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:712:2701] 2026-01-07T17:34:37.315202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:37.315296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.315320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:712:2701] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-07T17:34:37.315755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:37.315990Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 274us result status StatusSuccess 2026-01-07T17:34:37.316375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> CdcStreamChangeCollector::NewImage [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:36.755719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:36.755817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:36.755867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:36.755911Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:36.755949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:36.755977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:36.756046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:36.756112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:36.756748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:36.757110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:36.845050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:36.845101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:36.857501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:36.857718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:36.857857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:36.862605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:36.862876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:36.863432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.863613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:36.865963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.866144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:36.867273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:36.867332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.867440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:36.867485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:36.867525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:36.867680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:37.050087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.051970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.052069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.052165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:37.895668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:37.895694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:34:37.895723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:34:37.895792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:34:37.899280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:37.899386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-07T17:34:37.899733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:34:37.899775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:34:37.899906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:34:37.899938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:34:37.899993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:37.900022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:34:37.900437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:34:37.900625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.900657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:739:2728] 
2026-01-07T17:34:37.901009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:34:37.901079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:37.901113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.901136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:739:2728] 2026-01-07T17:34:37.901179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.901209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:739:2728] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-07T17:34:37.901752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:37.901954Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 246us result status StatusSuccess 2026-01-07T17:34:37.902759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 
MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:37.903325Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:37.903512Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 219us result status StatusSuccess 2026-01-07T17:34:37.903936Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:37.904493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:37.904823Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 187us result status StatusSuccess 2026-01-07T17:34:37.907009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { 
Self { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:36.486634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:36.486761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:36.486821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:36.486867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:36.486900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:36.486927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-07T17:34:36.486991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:36.487060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:36.487948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:36.488246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:36.579719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:36.579779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:36.601007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:36.601439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:36.601650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:36.609247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:36.609654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:36.610478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:36.610760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:36.613727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.613948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:36.615232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:36.615300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:36.615432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:36.615491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:36.615541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:36.615726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:36.778315Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.779936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:36.780665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:37.569300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-07T17:34:37.569449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-07T17:34:37.569854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:37.569988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:37.570046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-07T17:34:37.570337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:34:37.570535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:37.570608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:34:37.572774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:37.572827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:37.573040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:37.573223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:37.573273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:34:37.573332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:34:37.573670Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:37.573731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:34:37.573853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:37.573890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:37.573949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:37.573987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:37.574026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:34:37.574066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:37.574106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:34:37.574142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:34:37.574216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:34:37.574262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:34:37.574294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:34:37.574354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:34:37.575115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:37.575214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:37.575276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:37.575319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:34:37.575364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:37.576135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:37.576236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:37.576270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:37.576302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:34:37.576335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:34:37.576429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:34:37.576724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:37.576777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:34:37.576852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:37.580393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:37.580595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:37.581628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:34:37.581864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:34:37.581911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:34:37.582426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:34:37.582528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:37.582568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:730:2719] TestWaitNotification: OK eventTxId 102 2026-01-07T17:34:37.583076Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:37.583290Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 208us result status StatusPathDoesNotExist 2026-01-07T17:34:37.583472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:13.726131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:13.844678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:13.863389Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:13.863915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:13.863982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:14.152467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:14.152603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:14.238585Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807250732117 != 1767807250732121 2026-01-07T17:34:14.253879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:14.301123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:14.415323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:14.727138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:14.741467Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:14.857448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:14.933491Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:34:14.933764Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:34:14.983127Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:34:14.983418Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:34:14.993449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:34:14.993636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:34:14.995378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:34:14.995491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:34:14.995545Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:34:14.995935Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:34:14.996190Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:34:14.996280Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:34:14.996697Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:34:14.996820Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:34:14.998251Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:34:14.998326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:34:14.998383Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:34:14.998678Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:34:14.998791Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:34:14.998883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:34:15.010048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:34:15.048726Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:34:15.050459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:34:15.050666Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:34:15.050718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:15.050772Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:34:15.050815Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:15.051187Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:34:15.051233Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:34:15.051306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:34:15.051366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:34:15.051392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:34:15.051437Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:34:15.051480Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-07T17:34:15.051893Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:34:15.051999Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:34:15.052156Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:15.052204Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:15.052264Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:34:15.052322Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:15.052375Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:34:15.052474Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:34:15.052557Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:34:15.052597Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:15.052625Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:34:15.052654Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:34:15.053128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-07T17:34:15.053299Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:34:15.053599Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:34:15.053714Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:34:15.054273Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-07T17:34:15.054486Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:34:15.054659Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:34:15.054722Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-07T17:34:15.058462Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:15.058598Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:15.069681Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:34:15.069800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:34:15.070457Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:34:15.070538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-07T17:34:15.212713Z node 1 :TX_DATASHARD ... E TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-01-07T17:34:36.285972Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:34:36.286410Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:36.288560Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-01-07T17:34:36.288637Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.290099Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:34:36.290148Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:36.290259Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-01-07T17:34:36.290336Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:34:36.291050Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:34:36.291107Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:36.292168Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:34:36.292240Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:34:36.292296Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T17:34:36.292378Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:36.292443Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:34:36.292543Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.294842Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain 
[OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.294987Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.295332Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:36.295375Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:36.295423Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:34:36.295488Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:36.295535Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:34:36.295607Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:36.299676Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:36.299894Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-07T17:34:36.299973Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:34:36.300799Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:36.301591Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:34:36.301651Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:34:36.313140Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.313300Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.313418Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1009:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.314621Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1014:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.314906Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.319702Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:36.330860Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.331019Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.464196Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.464511Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.467480Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:34:36.504335Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:36.599953Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2925], serverId# [4:1140:2926], sessionId# [0:0:0] 2026-01-07T17:34:36.600471Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-07T17:34:36.600821Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807276600700 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.601051Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-07T17:34:36.612735Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:34:36.612842Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.689531Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-07T17:34:36.689884Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807276689774 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.690095Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767807276689774 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.690225Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-07T17:34:36.701816Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:34:36.701927Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.707146Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 72075186224037889, clientId# [4:1179:2957], serverId# [4:1180:2958], sessionId# [0:0:0] 2026-01-07T17:34:36.714164Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1181:2959], serverId# [4:1182:2960], sessionId# [0:0:0] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:14.130191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:14.236730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:14.253455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:14.253997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:14.254071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:14.551259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:14.551394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:14.648385Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807251003711 != 1767807251003715 2026-01-07T17:34:14.660846Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:14.710639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:14.820105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:15.165842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:15.180593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:15.293632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:15.356113Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:34:15.356412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:34:15.456830Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:34:15.457156Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:34:15.466486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:34:15.466670Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:34:15.468360Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:34:15.468454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:34:15.468518Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:34:15.468870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:34:15.469132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:34:15.469211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:34:15.469566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:34:15.469675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:34:15.470966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:34:15.471030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:34:15.471072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:34:15.471339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:34:15.471435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:34:15.471517Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:34:15.483887Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:34:15.526662Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:34:15.526925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:34:15.527158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:34:15.527203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:15.527240Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:34:15.527281Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:15.527621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:34:15.527667Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:34:15.527728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:34:15.527782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:34:15.527806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:34:15.527846Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:34:15.527884Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-07T17:34:15.528307Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:34:15.528403Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:34:15.528586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:15.528633Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:15.528676Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:34:15.528720Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:15.528763Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:34:15.528890Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:34:15.529000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:34:15.529047Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:15.529088Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:34:15.529129Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:34:15.529527Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-07T17:34:15.529698Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:34:15.529994Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:34:15.530121Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:34:15.530634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-07T17:34:15.530842Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:34:15.531044Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:34:15.531108Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-07T17:34:15.533000Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:15.533101Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:15.544079Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:34:15.544196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:34:15.544849Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:34:15.544961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-07T17:34:15.679614Z node 1 :TX_DATASHARD ... 7995Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:34:36.378037Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-07T17:34:36.378104Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:36.378155Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:34:36.378224Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:34:36.379530Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.379648Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.379722Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:34:36.386503Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:36.386986Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:34:36.387079Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:34:36.387285Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:36.388189Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:36.388605Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-07T17:34:36.388662Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:34:36.389333Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: 
Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-07T17:34:36.389382Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-07T17:34:36.401043Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.401182Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.401252Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.402385Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.402607Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:36.407295Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:36.414373Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.414497Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.414550Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:34:36.544279Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:36.544405Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:34:36.544464Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:34:36.547298Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:34:36.584980Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:36.683815Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1223:2975], serverId# [4:1224:2976], sessionId# [0:0:0] 2026-01-07T17:34:36.684214Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-07T17:34:36.684504Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807276684399 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.684691Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807276684399 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.684797Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-07T17:34:36.696448Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:34:36.696534Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.783475Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-07T17:34:36.783834Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767807276783701 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.784042Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1767807276783701 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.784185Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1767807276783701 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified 
Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.784272Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1767807276783701 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:34:36.784351Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-07T17:34:36.795791Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:34:36.795879Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:34:36.800466Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1271:3015], serverId# [4:1272:3016], sessionId# [0:0:0] 2026-01-07T17:34:36.809378Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1273:3017], serverId# [4:1274:3018], sessionId# [0:0:0] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TTxAllocatorClientTest::Boot |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:34:15.498063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:15.633391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:15.653553Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:15.654083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:15.654170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:15.934261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:15.934401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.024676Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807252611422 != 1767807252611426 2026-01-07T17:34:16.035937Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:16.082706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.198292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:16.543501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:16.557611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:16.680067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:16.727293Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:34:16.727595Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:34:16.790775Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:34:16.790978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:34:16.792807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:34:16.792916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:34:16.792998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:34:16.793444Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:34:16.793611Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:34:16.793714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:34:16.804660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:34:16.839681Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:34:16.839955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:34:16.840129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:34:16.840180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:16.840220Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:34:16.840268Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:16.840818Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:34:16.840996Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:34:16.841115Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:16.841171Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:16.841212Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:34:16.841255Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:16.841344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:34:16.841858Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:34:16.842152Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:34:16.842278Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:34:16.844226Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:16.855310Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:34:16.855431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:34:16.978274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:34:16.983468Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:34:16.983572Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:16.984075Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:16.984130Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:34:16.984179Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:34:16.984504Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:34:16.984678Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:34:16.984882Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:16.984985Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:34:16.987077Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:34:16.987552Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:16.989948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:34:16.990005Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:16.991155Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:34:16.991256Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:16.992383Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:16.992435Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:16.992483Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:34:16.992547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:16.992666Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:34:16.992758Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:16.998418Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:17.000339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:17.000539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:34:17.000606Z node 1 :TX_DATASHARD DEBUG: datashar ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:34:37.251127Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:37.253078Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:37.253128Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:34:37.253180Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:34:37.253256Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:37.253317Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:34:37.253397Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:37.254162Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:37.255934Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:34:37.256056Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:34:37.256107Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:34:37.271214Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:34:37.271396Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:34:37.271453Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-07T17:34:37.271503Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-07T17:34:37.272107Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:37.293707Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:34:37.507934Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:34:37.508027Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:37.513496Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:37.513610Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:34:37.513673Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-07T17:34:37.513930Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-07T17:34:37.514121Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:34:37.514569Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:34:37.515647Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:34:37.589612Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-07T17:34:37.589752Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:37.589811Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:34:37.589864Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:37.589949Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:34:37.590029Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-07T17:34:37.590127Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:37.592542Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-07T17:34:37.592650Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:34:37.607934Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.608069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.608151Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.614214Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.614528Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.619897Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:37.627237Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:37.807075Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:34:37.826046Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:34:37.861258Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:37.988712Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-07T17:34:37.989230Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-07T17:34:37.989621Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807277989488 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:34:37.989847Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-07T17:34:38.001539Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:34:38.001674Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:38.126396Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-07T17:34:38.126770Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807278126598 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:34:38.126935Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-07T17:34:38.139552Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:34:38.139633Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:34:38.141849Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1193:2976], serverId# [4:1194:2977], sessionId# [0:0:0] 2026-01-07T17:34:38.148196Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2978], serverId# [4:1196:2979], sessionId# [0:0:0] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true >> TTxAllocatorClientTest::ZeroRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:37.484820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:37.484885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:37.484948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:37.485004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:37.485044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:37.485077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:37.485160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:37.485224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:37.486087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:37.486409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:37.578712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:37.578772Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:37.621854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:37.622249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:37.622464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:37.675191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:37.675662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:37.676358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:37.676633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:37.689673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:37.689898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:37.691094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:37.691158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:37.691267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:37.691317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:37.691364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:37.691550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:37.963653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.964827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.964997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965167Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:37.965954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... less_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:39.288877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:785:2753] sender: [1:843:2058] recipient: [1:15:2062] 2026-01-07T17:34:39.331115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:39.331353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2026-01-07T17:34:39.331408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2026-01-07T17:34:39.331520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:34:39.331601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:34:39.331641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: 2026-01-07T17:34:39.331689Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:34:39.334522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-01-07T17:34:39.334792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2026-01-07T17:34:39.335009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:39.335053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2026-01-07T17:34:39.335113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-07T17:34:39.335222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:39.337658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:34:39.337849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-07T17:34:39.338587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:39.338733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:39.338782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2026-01-07T17:34:39.338927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-07T17:34:39.339092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:34:39.339150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:34:39.341689Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:39.341733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:39.341952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:34:39.342058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:39.342092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:834:2791], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-07T17:34:39.342127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:834:2791], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:34:39.342820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:39.342877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:34:39.342963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:39.342996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:39.343032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:39.343059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:39.343090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:34:39.343127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:39.343182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:34:39.343212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:34:39.343308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:34:39.343413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-07T17:34:39.343444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:34:39.343478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 2 
2026-01-07T17:34:39.344276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:39.344406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:39.344449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:34:39.344490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:34:39.344543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:34:39.345470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:39.345543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:39.345569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:34:39.345610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:34:39.345636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:34:39.345719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:34:39.349660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:39.352036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TBackupCollectionTests::DropCollectionWithIncrementalBackup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false |85.7%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |85.7%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2026-01-07T17:34:40.239896Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:34:40.240381Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:34:40.240972Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:34:40.242714Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.243263Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:34:40.255004Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.255157Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.255235Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.255375Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:34:40.255549Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.255661Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:34:40.255850Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TTxAllocatorClientTest::InitiatingRequest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 22808, MsgBus: 26138 2026-01-07T17:32:41.525649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673943546584704:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:41.526052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:41.547910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:42.065054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:42.105071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:42.105180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:42.201686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:42.396953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7592673943546584554:2081] 1767807161469682 != 1767807161469685 2026-01-07T17:32:42.416689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:42.421325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:42.517490Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:42.580633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:32:42.580662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:32:42.580669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:32:42.580773Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:43.275965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:43.285165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:32:43.349357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.605361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.880474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:43.996523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:46.406934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673965021422898:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.407057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.407556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673965021422908:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.407603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:46.517323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673943546584704:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:46.525391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:47.077451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.125169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.174516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.229842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.285531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.353439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.437254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.549805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:47.722350Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673969316391079:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.722437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.722991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673969316391084:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.723042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673969316391085:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.723184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:47.727872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:47.753394Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673969316391088:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:32:47.836916Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673969316391139:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... pSnapshotManager: discarding snapshot; our snapshot: [step: 1767807260170, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 6868, MsgBus: 4609 2026-01-07T17:34:23.448318Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592674381986973481:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:23.448387Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:23.512018Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:23.741466Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:23.758661Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:23.758807Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:23.791784Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:23.825051Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:24.129758Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:24.129794Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:24.129807Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:24.129933Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:24.547361Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:25.496409Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:25.510670Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:34:25.525755Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.793261Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:26.180769Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:26.445953Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:28.446743Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592674381986973481:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:28.446866Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:32.070789Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674420641681022:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:32.070932Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:32.071334Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674420641681032:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:32.071395Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:32.220349Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.321389Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.445766Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.567655Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.696145Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.911230Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:33.011568Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:33.136619Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:33.344741Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674424936649222:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:33.344868Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:33.345274Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674424936649228:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:33.345331Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674424936649227:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:33.345548Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:33.354063Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:33.510932Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674424936649231:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:34:33.610400Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674424936649286:3796] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:38.170900Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807277537, txId: 281474976710673] shutting down |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> TTxAllocatorClientTest::InitiatingRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:33.132495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:33.132569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.132604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:33.132636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:33.132684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:33.132729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:33.132774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.132828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:33.133572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:33.133911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:33.238873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:33.238933Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-07T17:34:33.260074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:33.260505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:33.260726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:33.267506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:33.267877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:33.268620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:33.268895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:33.271950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.272131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:33.273393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:33.273498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.273621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:33.273676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:33.273718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:33.273892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:33.422819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.423843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.423978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.424840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
xId: 103 ready parts: 1/1 2026-01-07T17:34:41.238371Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:34:41.238411Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:34:41.238605Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:34:41.240305Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:41.241072Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-07T17:34:41.241325Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:34:41.241648Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:34:41.241977Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:41.242102Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:34:41.242315Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:41.242545Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:34:41.242862Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 Forgetting tablet 72075186233409546 2026-01-07T17:34:41.243178Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:34:41.243349Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:34:41.243828Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 
72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-07T17:34:41.244423Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:34:41.244594Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:34:41.244886Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:41.244968Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:41.245111Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:34:41.245475Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:41.245529Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:41.245622Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:41.249993Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:34:41.250072Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-07T17:34:41.250244Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:34:41.250272Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:34:41.250329Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:34:41.250354Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-07T17:34:41.250561Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:34:41.250606Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-01-07T17:34:41.250961Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:34:41.251074Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, 
skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:34:41.251414Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:41.251471Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:34:41.251981Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:41.252098Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:41.252147Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:594:2536] TestWaitNotification: OK eventTxId 103 2026-01-07T17:34:41.252813Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:41.253072Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 283us result status StatusPathDoesNotExist 2026-01-07T17:34:41.253253Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:34:41.253833Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:41.254062Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 232us result status StatusSuccess 2026-01-07T17:34:41.254550Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:41.255423Z node 6 :HIVE INFO: tablet_helpers.cpp:1586: [72057594037968897] TEvRequestHiveInfo, msg: |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2026-01-07T17:34:41.659563Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:34:41.660211Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:34:41.661054Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:34:41.662974Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.663560Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:34:41.675899Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.676021Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.676098Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.676193Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:34:41.676349Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.676465Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:34:41.676622Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:34:41.677569Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-07T17:34:41.678150Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.678230Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.678353Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-07T17:34:41.678412Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 2026-01-07T17:34:41.678600Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.678804Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.679004Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.679245Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.679470Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-07T17:34:41.679973Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.680054Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.680175Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2026-01-07T17:34:41.680222Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 5000 to# 10000 2026-01-07T17:34:41.680422Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.680579Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.680789Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.681069Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-07T17:34:41.681209Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-07T17:34:41.681779Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.681879Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.682017Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2026-01-07T17:34:41.682080Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 10000 to# 15000 2026-01-07T17:34:41.682297Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2026-01-07T17:34:41.946246Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:34:41.946780Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:34:41.947615Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:34:41.949296Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.949778Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:34:41.960748Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.960892Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.961388Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.961514Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:34:41.961635Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.961743Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:34:41.961910Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:34:41.962726Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-07T17:34:41.963217Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.963284Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:41.963417Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-07T17:34:41.963471Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection [GOOD] >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TBackupCollectionTests::DropCollectionDuringActiveBackup [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestWriteReadPatchRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:32.575074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:32.575165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.575202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:32.575241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:32.575298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:32.575333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:32.575391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.575460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:32.576339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:32.576632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:32.671044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:32.671110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:32.698394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:32.698862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:32.699088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:32.706652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:32.707004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:32.707732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.708090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.710893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.711069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:32.712240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.712308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.712426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:32.712470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:32.712508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:32.712693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.719620Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 
2026-01-07T17:34:32.871283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:32.871509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.871771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:34:32.871830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:34:32.872051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:34:32.872132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:32.881059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.881319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:34:32.881597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.881666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:32.881724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:32.881759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:34:32.887750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.887830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:32.887874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:32.896505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.896571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 
1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.896632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.896689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:32.900491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:32.904777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:32.905084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:34:32.906428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.906597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:32.906668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.907022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:32.907084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.907289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:32.907398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.910380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.910467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
75576Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:34:42.277723Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:34:42.277849Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:42.277896Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:42.277924Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:42.277950Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:42.278063Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:42.278123Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:34:42.278271Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:42.278313Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:42.278360Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:42.278403Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:42.278446Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:34:42.278495Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:42.278537Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:34:42.278578Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:34:42.278782Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:34:42.279763Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:42.280204Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:42.280465Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 
2026-01-07T17:34:42.280664Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:34:42.281661Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:34:42.281953Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:34:42.282571Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:42.282790Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2026-01-07T17:34:42.284000Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409546 2026-01-07T17:34:42.285005Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:34:42.285177Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:34:42.285609Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409549 2026-01-07T17:34:42.287239Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:42.290743Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:34:42.290946Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2026-01-07T17:34:42.293513Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:42.293580Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at 
schemeshard: 72057594046678944 2026-01-07T17:34:42.293718Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:34:42.294378Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:42.294437Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:42.294517Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:42.296798Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:34:42.296884Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:34:42.297017Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:34:42.297043Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:34:42.297117Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:34:42.297141Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:34:42.298917Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:34:42.298973Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:34:42.299067Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:34:42.299282Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:34:42.299562Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:42.299615Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:34:42.300020Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:42.300114Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:42.300164Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:567:2508] TestWaitNotification: OK eventTxId 103 
2026-01-07T17:34:42.300688Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:42.300874Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 226us result status StatusPathDoesNotExist 2026-01-07T17:34:42.301084Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:32.082858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:32.082970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.083009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2026-01-07T17:34:32.083058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:32.083102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:32.083134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:32.083212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.083324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:32.084236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:32.084546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:32.183137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:32.183222Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:32.214106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:32.214557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:32.214791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:32.233723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:32.234254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:32.235111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.235421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.249722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.250009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:32.251438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.251512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.251668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:32.251726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:32.251771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:32.251990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.404416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.408949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.409974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.410057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.410126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.410365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.410467Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:34:42.693061Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 134 2026-01-07T17:34:42.694119Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:42.694228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:42.696439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:42.696525Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:42.696661Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 134 -> 135 2026-01-07T17:34:42.696913Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:42.697031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:34:42.699428Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:42.699482Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:42.699632Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:34:42.699796Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:42.699829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:34:42.699868Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-07T17:34:42.700191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:42.700263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2026-01-07T17:34:42.700325Z node 
7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 135 -> 240 2026-01-07T17:34:42.701266Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:42.701366Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:42.701405Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:42.701440Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:34:42.701478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:34:42.702677Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:42.702793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:42.702838Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:42.702873Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-07T17:34:42.702905Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:34:42.702976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:34:42.705855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:42.705932Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:34:42.706082Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:42.706136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:42.706189Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:42.706236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
102 ready parts: 1/1 2026-01-07T17:34:42.706283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:34:42.706338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:42.706394Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:34:42.706440Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:34:42.706548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:34:42.707232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:42.707310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:42.707393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:34:42.707839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:42.707920Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:42.708016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:42.708828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:42.710385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:42.713167Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:34:42.713293Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:34:42.713568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:34:42.713624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:34:42.714221Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:34:42.714334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:42.714387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:345:2334] TestWaitNotification: OK eventTxId 102 2026-01-07T17:34:42.715054Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:42.715283Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 268us result status StatusPathDoesNotExist 2026-01-07T17:34:42.715482Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> KeyValueReadStorage::ReadError >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst |85.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestVacuumOnEmptyTablet >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2026-01-07T17:34:44.551450Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:44.551574Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:272} Unexpected EvGetResult. 
KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1767807284550 ErrorReason# 2026-01-07T17:34:44.563425Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:44.563558Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:242} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1767807284563 ErrorReason# 2026-01-07T17:34:44.572040Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:44.572125Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:224} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1767807284571 ErrorReason# |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBasicWriteRead >> KqpRboYql::ExpressionSubquery [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2026-01-07T17:34:45.507520Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:45.510548Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-07T17:34:45.517144Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:45.517238Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-07T17:34:45.523828Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:45.523936Z 1 00h00m00.000000s 
:KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:312} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2026-01-07T17:34:45.523579Z ErrorReason# |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:31.989076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:31.989186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:31.989228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:31.989274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:31.989324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:31.989363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:31.989442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:31.989527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:31.990398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:31.990687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:32.083316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:32.083379Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:32.102921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:32.103366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:32.103600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2026-01-07T17:34:32.111131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:32.111497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:32.112189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.112436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.115403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.115583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:32.116769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.116828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.116955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:32.117011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:32.117052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:32.117230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.125831Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:34:32.267439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:32.267701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.267906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:34:32.267956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:34:32.268200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:34:32.268280Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:32.270756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.270967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:34:32.271253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.271324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:34:32.271372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:34:32.271411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:34:32.273723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.273812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:34:32.273865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:34:32.275861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.275937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.275998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.276071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:34:32.279673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:32.290975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:34:32.291199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:34:32.292266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.292435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:32.292479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.292758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:34:32.292817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:34:32.293065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:32.293192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.295728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.295788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
6545 cookie: 0:108 msg type: 269090816 2026-01-07T17:34:44.881485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000041 2026-01-07T17:34:44.881950Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:44.882124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:44.882187Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-07T17:34:44.882420Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-07T17:34:44.882489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-07T17:34:44.882742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:34:44.882830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:34:44.882905Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:34:44.884197Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-07T17:34:44.884549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2026-01-07T17:34:44.885880Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:44.885917Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:44.886126Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:34:44.886234Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:44.886265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard 
Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-07T17:34:44.886304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 108, path id: 41 2026-01-07T17:34:44.886764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:34:44.886810Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-07T17:34:44.886967Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:34:44.887012Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:34:44.887061Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:34:44.887101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:34:44.887153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-07T17:34:44.887397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:34:44.887452Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-07T17:34:44.887515Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-07T17:34:44.887609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:34:44.887662Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2026-01-07T17:34:44.887698Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:34:44.887731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-07T17:34:44.888467Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:34:44.888552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:34:44.888591Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:34:44.888646Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:34:44.888705Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-07T17:34:44.890515Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:34:44.890602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:34:44.890635Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:34:44.890684Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-07T17:34:44.890719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:34:44.890806Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-07T17:34:44.895235Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-07T17:34:44.895539Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-07T17:34:44.895839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-07T17:34:44.895891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-07T17:34:44.896358Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-07T17:34:44.896458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:34:44.896503Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:887:2856] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-07T17:34:44.899600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:44.899830Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2026-01-07T17:34:44.899989Z 
node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:34:44.902461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges)" TxId: 109 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 108, at schemeshard: 72057594046678944 2026-01-07T17:34:44.902731Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2026-01-07T17:34:46.129608Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:46.132110Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-07T17:34:46.138764Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:46.138828Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-07T17:34:46.144301Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:124} Inline read request KeyValue# 1 Status# OK 2026-01-07T17:34:46.144357Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:136} Expected OK or UNKNOWN and given OK readCount# 0 2026-01-07T17:34:46.144390Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::ExpressionSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 9835, MsgBus: 15790 2026-01-07T17:34:06.322850Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674308661218890:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:06.322892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:06.375640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:06.734196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:06.763944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:06.764080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:06.859146Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674308661218760:2081] 1767807246263715 != 1767807246263718 2026-01-07T17:34:06.865262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:06.867989Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:06.996079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:07.005689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:07.005718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:07.005726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:07.005813Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:07.369073Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:07.464377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.481684Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:34:09.713949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674321546121537:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.714425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.716395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674321546121548:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.716524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.082856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:34:10.690572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:10.690785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:34:10.690977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:34:10.691041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:34:10.691096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:34:10.691178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:34:10.691309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:34:10.691413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:34:10.691502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:34:10.691617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:34:10.691720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-07T17:34:10.691807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:34:10.691892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674325841089401:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:34:10.709218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:10.709301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:34:10.709518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:34:10.709653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:34:10.709766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:34:10.709861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:34:10.709977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:34:10.710108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:34:10.710210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:34:10.710305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:34:10.710419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:34:10.710524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7592674325841089403:2332];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:34:10.710633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=720751862240 ... X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.840552Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.840570Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851228Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037984;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851228Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851287Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851297Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037984;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851308Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.851315Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037984;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861321Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861321Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861396Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861403Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861417Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.861418Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871435Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871435Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871488Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871508Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871509Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.871524Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881563Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881563Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881628Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881655Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881659Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.881678Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891667Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891668Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037988;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891726Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037988;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891732Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891746Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037988;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.891747Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.901396Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.901489Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:36.901511Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-07T17:34:37.006914Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674442614111604:3008], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.006994Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.007100Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674442614111609:3011], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.007236Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674442614111611:3012], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.007280Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:37.010797Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:37.028808Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592674442614111612:3013], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:34:37.112312Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592674442614111666:5238] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:41.085734Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:34:41.085770Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> TKeyValueTest::TestRewriteThenLastValueNewApi >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:33.408852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:33.409003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.409061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:33.409097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:33.409139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:33.409176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:33.409241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:33.409305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:33.410159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:33.410454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:33.539685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:33.539752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:33.557525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:33.557895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:33.558085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:33.568072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:33.568423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:33.569271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:33.569528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:33.572238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.572414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:33.573629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:33.573703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:33.573836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:33.573884Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:33.573929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:33.574094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:33.723520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.724876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:33.725743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... State leave, operationId 103:0, at tablet# 72057594046678944 2026-01-07T17:34:48.243277Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-07T17:34:48.243432Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:48.245047Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:34:48.245194Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-07T17:34:48.245552Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:48.245710Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 34359740526 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:48.245765Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-07T17:34:48.246058Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-07T17:34:48.246128Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-07T17:34:48.246275Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:34:48.246453Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:588: Send TEvUpdateTenantSchemeShard, to actor: [8:795:2764], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:34:48.248477Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6262: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 
2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2026-01-07T17:34:48.248592Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2026-01-07T17:34:48.248795Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2026-01-07T17:34:48.249026Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:48.249083Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:48.249276Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:48.249325Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:34:48.249773Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:48.249839Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2026-01-07T17:34:48.249882Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-07T17:34:48.250638Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:48.250751Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:34:48.250803Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:34:48.250850Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-07T17:34:48.250903Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-07T17:34:48.251008Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:34:48.253360Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 
18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-07T17:34:48.253453Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:34:48.253557Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:795:2764], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:34:48.253650Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-07T17:34:48.253680Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-07T17:34:48.253807Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-07T17:34:48.253839Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:881:2824], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-07T17:34:48.254913Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409546, cookie: 0 2026-01-07T17:34:48.255463Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:48.255521Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:34:48.255669Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:48.255719Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:48.255765Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:48.255803Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:48.255853Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:34:48.255906Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:48.255952Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:34:48.255993Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:34:48.256075Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:34:48.256386Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:48.256462Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-07T17:34:48.258256Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:48.258312Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:34:48.258774Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:48.258882Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:48.258925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1007:2948] TestWaitNotification: OK eventTxId 103 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress [FAIL] >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TBackupCollectionTests::DropCollectionValidationCases [GOOD] >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:32.442232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:32.442331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.442373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:32.442419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:32.442463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:32.442495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:32.442550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:32.442636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:32.443578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:32.443875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:32.575611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:32.575687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:32.608475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:32.608896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:32.609124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:32.629614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:32.629981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:32.630775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:32.631078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:32.643188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.643427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:32.644719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:32.644794Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:32.644906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:32.644974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:32.645026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:32.645238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:32.910785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.911936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } 
TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.912872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.913206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.913347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:32.913431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... r__attr_1" Value: "value" } UserAttributesVersion: 2 2026-01-07T17:34:49.664061Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186234409546 2026-01-07T17:34:49.664321Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2026-01-07T17:34:49.664614Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:49.664673Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:49.664960Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:49.665012Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:34:49.665971Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:49.666103Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:34:49.666167Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:34:49.666233Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-07T17:34:49.666291Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:34:49.666412Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:34:49.669475Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 2 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-07T17:34:49.669584Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:34:49.669691Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:795:2764], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 2, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:34:49.669803Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-07T17:34:49.669832Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-07T17:34:49.669980Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-07T17:34:49.670008Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:881:2824], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-07T17:34:49.670861Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186234409546, cookie: 0 2026-01-07T17:34:49.671026Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:34:49.671113Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:34:49.671437Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:34:49.671494Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:34:49.672027Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:34:49.672164Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:34:49.672216Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [8:1027:2968] TestWaitNotification: OK eventTxId 104 2026-01-07T17:34:49.672979Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:49.673242Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 318us result status StatusSuccess 2026-01-07T17:34:49.673763Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:49.674542Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2026-01-07T17:34:49.674818Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 295us result status StatusSuccess 2026-01-07T17:34:49.675369Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409546 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976720657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:39.206120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:39.206232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-07T17:34:39.206297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:39.206352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:39.206392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:39.206424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:39.206480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:39.206563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:39.207467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:39.207789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:39.325199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:39.325262Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:39.351904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:39.352282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:39.352495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:39.358915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:39.359278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:39.360095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:39.360328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:39.366389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:39.366581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:39.367878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:39.367946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:39.368078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-07T17:34:39.368128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:39.368173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:39.368377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:39.537057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.538943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.539006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.539111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.539188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.539273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:39.539355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ], version: 18446744073709551615 2026-01-07T17:34:51.286082Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-07T17:34:51.286167Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:34:51.288706Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:34:51.288874Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:51.288947Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:51.288980Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:51.289011Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:34:51.289253Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:34:51.289312Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:34:51.289473Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:51.289525Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:51.289578Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:34:51.289627Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:51.289682Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:34:51.289739Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:34:51.289793Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:34:51.289842Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:34:51.290046Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:34:51.291433Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:51.292195Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-07T17:34:51.292397Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:34:51.292755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:34:51.293049Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:34:51.293227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:34:51.293364Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:51.293543Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-07T17:34:51.294331Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-07T17:34:51.294539Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:34:51.294717Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:34:51.295058Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-07T17:34:51.295238Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:34:51.295374Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:34:51.295914Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:51.295982Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:51.296125Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:34:51.296433Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:34:51.296474Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:34:51.296556Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:34:51.300841Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:34:51.300916Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-07T17:34:51.301088Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:34:51.301110Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:34:51.301324Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:34:51.301358Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-07T17:34:51.301773Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:34:51.301836Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-01-07T17:34:51.302254Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:34:51.302394Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:34:51.302708Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:34:51.302750Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 
103 2026-01-07T17:34:51.303256Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:34:51.303390Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:34:51.303440Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:585:2526] TestWaitNotification: OK eventTxId 103 2026-01-07T17:34:51.304003Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:51.304230Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 258us result status StatusPathDoesNotExist 2026-01-07T17:34:51.304460Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:38.432111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:38.432206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:38.432241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:38.432290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:38.432328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:38.432353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:38.432402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:38.432457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:38.433312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:38.433616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:38.527444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:38.527507Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:38.557929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:38.558405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:38.558621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:38.567207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:38.567536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:38.568309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:38.568563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:38.579866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:38.580084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:38.581444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:38.581520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:38.581654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:38.581710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:38.581763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:38.581977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:38.734452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.735473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.735609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.735682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.735860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.735980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:38.736653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:34:38.736743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2026-01-07T17:34:52.342998Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 3 -> 128 2026-01-07T17:34:52.345054Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.345275Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.345342Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.345407Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2026-01-07T17:34:52.345478Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2026-01-07T17:34:52.345655Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:34:52.347392Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-07T17:34:52.347536Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-07T17:34:52.347844Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:52.347961Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 34359740526 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:52.348010Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-07T17:34:52.348322Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 
128 -> 240 2026-01-07T17:34:52.348405Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-07T17:34:52.348548Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:34:52.348650Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:762:2735], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:34:52.350385Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:52.350438Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:34:52.350645Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:52.350695Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:34:52.351018Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.351079Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2026-01-07T17:34:52.351120Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-07T17:34:52.351717Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:52.351814Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:34:52.351856Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:34:52.351900Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:34:52.351941Z 
node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-07T17:34:52.352021Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:34:52.355026Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.355102Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:34:52.355248Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:52.355304Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:52.355344Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:34:52.355382Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:52.355425Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:34:52.355505Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:703:2692] message: TxId: 102 2026-01-07T17:34:52.355555Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:34:52.355601Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:34:52.355636Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:34:52.355837Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:34:52.357178Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:34:52.358227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:34:52.358295Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:950:2891] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-07T17:34:52.360821Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:34:52.361003Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" 
ExternalStatisticsAggregator: false } 2026-01-07T17:34:52.361071Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2026-01-07T17:34:52.361196Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-07T17:34:52.361241Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-07T17:34:52.363274Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:34:52.363559Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:78:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:81:2057] recipient: [5:80:2112] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:83:2057] recipient: [5:80:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:82:2113] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:198:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:79:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:84:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:83:2113] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:199:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> KqpRboYql::BasicJoins [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> TBackupCollectionTests::DropErrorRecoveryTest [GOOD] >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart >> KqpRboPg::TPCH [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TBackupCollectionTests::DropCollectionDuringActiveOperation [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestWriteLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2026-01-07T17:34:55.854059Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-07T17:34:55.857230Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::BasicJoins [GOOD] Test command err: Trying to start YDB, gRPC: 27783, MsgBus: 17406 2026-01-07T17:34:06.255456Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674309389903525:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:06.255826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:06.489062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:06.489953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:06.490054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:06.519673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:06.567116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:06.661681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:06.661711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:06.661720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:06.661822Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:06.665062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:07.114662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.264725Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:09.349705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322274806253:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.349817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.350249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674322274806263:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.350309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:09.639751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:34:10.344348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:10.344703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:34:10.344938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:34:10.345035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:34:10.345112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:34:10.345218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:34:10.345321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:34:10.345427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:34:10.345528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:34:10.345631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:34:10.345765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-07T17:34:10.345866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:34:10.345986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7592674322274806702:2329];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:34:10.352569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:10.352629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:34:10.352763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:34:10.352850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:34:10.355787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:34:10.355941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:34:10.356027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:34:10.356132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:34:10.356243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:34:10.356339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:34:10.356420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:34:10.356499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:34:10.356578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592674322274806714:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:34:10.414411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7592674326569774012:2332];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:10.414481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7592674326569774012:2332];tablet_id=72075186224037950;pr ... 5186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.785221Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.785303Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.785323Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.792480Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.792554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.792589Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.806746Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.806841Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.806871Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.810202Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.810278Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.810369Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.818296Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.818370Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.818398Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.820166Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.820219Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.820237Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.828594Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.828673Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.828693Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.834829Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.834902Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.834925Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-07T17:34:37.862085Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrcc2p31mj3rqj6030cs1h", SessionId: ydb://session/3?node_id=3&id=MTU0NzA5YjctNjc4NjEyNjctYWZlNTY1MzItNmJmOGU4ZmI=, Slow query, duration: 10.957371s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/t1` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n\n CREATE TABLE `/Root/t2` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n\n CREATE TABLE `/Root/t3` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n ", parameters: 0b 2026-01-07T17:34:38.066374Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674447160609127:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:38.066475Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:38.066736Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674447160609132:3351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:38.066772Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674447160609133:3352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:38.066819Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:38.071980Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:38.101770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-07T17:34:38.103125Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592674447160609136:3353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:34:38.173691Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592674447160609187:6615] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:52.740309Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592674507290160005:11406], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:135: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)', code: 1 2026-01-07T17:34:52.740775Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NjQ1ZGE4N2UtMTg1ODM0OC0zZjMzZWVkZC1lZmZlMzEwNg==, ActorId: [3:7592674507290160002:11404], ActorState: ExecuteState, LegacyTraceId: 01kecrd579e5432j4tc2yx4veg, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 135 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)\'" end_position { row: 3 column: 135 } issue_code: 1 } } tx_id# trace_id# 2026-01-07T17:34:52.849175Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592674507290160022:11415], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:143: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported, code: 1 2026-01-07T17:34:52.849543Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NjYyOGRlMmYtYTdlMWM1NDQtMWUzNTYyY2EtODYyYzI5YmI=, ActorId: [3:7592674507290160019:11413], ActorState: ExecuteState, LegacyTraceId: 01kecrd5an8jx3209cqb0qcpvg, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 143 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported" end_position { row: 3 column: 143 } issue_code: 1 } } tx_id# trace_id# |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:203:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:137:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:136:2159] sender: [1:138:2058] recipient: [1:114:2145] 2026-01-07T17:33:21.337043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:21.337135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.337275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:21.337313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:33:21.337362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:21.337387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:21.337461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:21.337526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:21.338355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:21.338606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:21.453932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:33:21.454006Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:21.454833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:186:2058] recipient: [1:15:2062] 2026-01-07T17:33:21.469145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:21.469318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:21.469453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:21.474920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:21.475129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:21.475728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:21.476001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:21.478994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.479150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:21.480052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:21.480096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:21.480234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:21.480279Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:21.480316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:21.480436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:21.606912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.607898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:21.608735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
emeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:34:52.005049Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:34:52.005106Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-07T17:34:52.005153Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:34:52.005203Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:34:52.005318Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2026-01-07T17:34:52.009631Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-07T17:34:52.009684Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-01-07T17:34:52.009835Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-07T17:34:52.009964Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-07T17:34:52.011019Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:34:52.011073Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-01-07T17:34:52.011196Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, 
operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:34:52.011256Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:34:52.011346Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:34:52.011412Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:52.011450Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.011491Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:34:52.011536Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-07T17:34:52.011953Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-07T17:34:52.014153Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.014305Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.014416Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:34:52.014460Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-01-07T17:34:52.014561Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:34:52.014609Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:34:52.014650Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:34:52.014683Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:34:52.014720Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-07T17:34:52.014764Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:34:52.014801Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-07T17:34:52.014834Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-07T17:34:52.014961Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-07T17:34:52.018735Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-07T17:34:52.018785Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-07T17:34:52.019157Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-07T17:34:52.019246Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-07T17:34:52.019284Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:864:2835] TestWaitNotification: OK eventTxId 1003 2026-01-07T17:34:52.019746Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:34:52.019950Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 236us result status StatusSuccess 2026-01-07T17:34:52.020536Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::TPCH [GOOD] Test command err: Trying to start YDB, gRPC: 7529, MsgBus: 27162 2026-01-07T17:34:06.961180Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674307229008091:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:06.961589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:07.219879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:07.223071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:07.223200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:07.247747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:07.313019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:07.396837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:07.396870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:07.396879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:07.396999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:07.425165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:07.836087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.950789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:10.028654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674324408877978:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.028685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674324408877971:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.028814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.029181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674324408877986:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.029251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:10.033351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:10.049026Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674324408877985:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:34:10.200899Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674324408878038:2532] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9317, MsgBus: 10527 2026-01-07T17:34:11.361094Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592674330776042440:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:11.361376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:11.377023Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:11.524398Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.537084Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.537160Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.571981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.679026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:11.679946Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:11.679963Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:11.679970Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:11.680052Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.118043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.368683Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:14.882097Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674343660945156:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.882207Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.887594Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674343660945166:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:14.887697Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:15.218603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:34:15.755782Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:34:15.756086Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:34:15.756412Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:34:15.756535Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:34:15.756664Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:34:15.756825Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:34:15.756947Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:34:15.757048Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:34:15.757135Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[2:7592674347955912887:2335];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_regist ... 
X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.036946Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.036967Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.037013Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.037067Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.037101Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.046502Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.046564Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.046579Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.047002Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.047068Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.047120Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.054989Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.055060Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 
2026-01-07T17:34:50.055082Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.056540Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.056606Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.056625Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.062869Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.062948Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.062968Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.066518Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.066599Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.066630Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.072569Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.072629Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.072644Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.076665Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.076743Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.076762Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.080906Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.080982Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.080999Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.086788Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.086875Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.086895Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:34:50.129612Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674497716017085:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:50.129696Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:50.129728Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674497716017090:3025], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:50.129949Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592674497716017092:3026], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:50.130013Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:50.134220Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:50.148971Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592674497716017093:3027], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2026-01-07T17:34:50.211000Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592674497716017149:5412] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:53.392185Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:34:53.392221Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TBackupCollectionTests::ConcurrentDropProtectionTest [GOOD] >> TBackupCollectionTests::BackupServiceDirectoryValidation >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestRenameWorks >> TKeyValueTest::TestConcatWorks >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation >> TKeyValueTest::TestCopyRangeWorks >> TBackupCollectionTests::BackupServiceDirectoryValidation [GOOD] >> TBackupCollectionTests::BackupWithIndexes >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TBackupCollectionTests::BackupWithIndexes [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.911362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:11.017797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:11.026012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:11.026444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:11.026617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.452029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.569683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.569811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.613595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.712182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.472448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.473521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.473570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.473602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.474066Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.531394Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.251260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:17.083924Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:17.093298Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:17.097726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:17.147544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:17.147665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:17.207179Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:17.209111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.431060Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:17.431181Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:17.432482Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.436305Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.436879Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.437749Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.438249Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.438432Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.438731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.438917Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.439085Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.462419Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.805556Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:17.897674Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:17.897793Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:17.981819Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:17.983408Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:17.983670Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:17.983756Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:17.983829Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:17.983894Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:17.983954Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:17.984015Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:17.985100Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:18.003792Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:18.003920Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:18.041584Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:34:18.042679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:34:18.126772Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:34:18.128801Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:34:18.156138Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:34:18.156216Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:34:18.156311Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:34:18.162761Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:18.175960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:18.197294Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:18.197511Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:18.228866Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:18.525755Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:34:18.539968Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:19.084464Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:19.256464Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:19.256571Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:34:20.148468Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... atType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:44.434270Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:34:44.434315Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4573:3811], StatRequests.size() = 1 2026-01-07T17:34:45.621842Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4609:3829]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:45.622201Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:34:45.622250Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4609:3829], StatRequests.size() = 1 2026-01-07T17:34:45.644177Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:46.783813Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4643:3845]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:46.784303Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:34:46.784351Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4643:3845], StatRequests.size() = 1 2026-01-07T17:34:47.898555Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4673:3857]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:47.898944Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:34:47.898993Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4673:3857], StatRequests.size() = 1 2026-01-07T17:34:47.921127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:34:47.921438Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-07T17:34:47.921842Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 
cookie: 0 2026-01-07T17:34:47.922045Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:34:49.002335Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4707:3873]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:49.002712Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:34:49.002759Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4707:3873], StatRequests.size() = 1 2026-01-07T17:34:50.069855Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4739:3885]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:50.070153Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:34:50.070197Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4739:3885], StatRequests.size() = 1 2026-01-07T17:34:51.303142Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4778:3900]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:51.303446Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:34:51.303493Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4778:3900], StatRequests.size() = 1 2026-01-07T17:34:51.336693Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:51.472731Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:34:51.472812Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:34:51.472855Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:34:51.472900Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:34:52.599514Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4815:3916]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:52.599846Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:34:52.599898Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4815:3916], StatRequests.size() = 1 2026-01-07T17:34:53.807391Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4846:3928]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:53.807824Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:34:53.807876Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4846:3928], StatRequests.size() = 1 2026-01-07T17:34:53.829889Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-07T17:34:53.830138Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:34:53.830730Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:34:53.830953Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:34:53.842095Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:34:53.842205Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:34:53.842526Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:34:53.856909Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:34:54.988075Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4880:3945]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:54.988410Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:34:54.988452Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4880:3945], StatRequests.size() = 1 2026-01-07T17:34:55.546834Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:34:55.546895Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.406000s, at schemeshard: 72075186224037899 2026-01-07T17:34:55.547022Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-07T17:34:55.560921Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:34:56.123427Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4912:3960]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:56.123786Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:34:56.123839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4912:3960], StatRequests.size() = 1 2026-01-07T17:34:57.315602Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4944:3973]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:57.315913Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:34:57.315962Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4944:3973], StatRequests.size() = 1 2026-01-07T17:34:57.338327Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:58.454369Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4982:3989]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:58.454709Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:34:58.454752Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4982:3989], StatRequests.size() = 1 2026-01-07T17:34:59.457669Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5010:4000]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:59.457945Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:34:59.457979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5010:4000], StatRequests.size() = 1 2026-01-07T17:34:59.479845Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:34:59.480076Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-07T17:34:59.480654Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:34:59.480873Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:34:59.491625Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:34:59.491701Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:34:59.491962Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:34:59.505261Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:00.542641Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:5044:4017]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:00.542881Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-07T17:35:00.542911Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:5044:4017], StatRequests.size() = 1 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:33:42.720683Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:42.720787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:42.720836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:42.720878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:42.721570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:42.721630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:42.721715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:42.723074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:42.724999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:42.726101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:42.857424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:33:42.857483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:42.875208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:42.875591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:42.876771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:42.892829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:42.893184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:42.899303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:42.906218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:42.915110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:42.927472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:42.937108Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:42.937195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:42.937329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:42.937398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:42.937493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:42.938115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:43.105750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.108985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.109074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:43.109141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... SHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:35:02.496186Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:02.496240Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:35:02.496302Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:02.496359Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:35:02.496408Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:35:02.496590Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:02.496658Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:35:02.496709Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:35:02.496754Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:35:02.498034Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 10 } 2026-01-07T17:35:02.498089Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-07T17:35:02.498170Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:02.498257Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:02.498298Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:02.498353Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:35:02.498407Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:02.498534Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:35:02.499676Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 } 2026-01-07T17:35:02.499723Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-07T17:35:02.499804Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:02.499890Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:02.499923Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:02.499957Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:35:02.499993Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:02.500093Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:35:02.500160Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:35:02.500486Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [10:127:2152], Recipient [10:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:35:02.500528Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:35:02.500613Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:02.500671Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:35:02.500765Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:02.503668Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:02.504850Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:02.504892Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:02.505497Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:02.595974Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:02.596309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:35:02.596673Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:35:02.596739Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:35:02.597197Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [10:849:2814], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:35:02.597268Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:35:02.597324Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:35:02.597501Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [10:796:2761], Recipient [10:127:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-07T17:35:02.597546Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:35:02.597635Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:35:02.597758Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:35:02.597808Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:847:2812] 2026-01-07T17:35:02.598043Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [10:849:2814], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:35:02.598086Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:35:02.598135Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-07T17:35:02.598608Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [10:850:2815], Recipient [10:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:35:02.598680Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:35:02.598809Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:02.599048Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 247us result status StatusPathDoesNotExist 2026-01-07T17:35:02.599247Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit [GOOD] >> TBackupCollectionTests::BackupWithIndexesDefault |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TBackupCollectionTests::IncrementalBackupOperation [GOOD] >> TBackupCollectionTests::EmptyIncrementalBackupRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 
72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:85:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:205:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:86:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:206:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:87:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:89:2118] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:92:2057] recipient: [12:89:2118] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:91:2119] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:88:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:93:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:92:2120] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2122] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:96:2057] recipient: [15:93:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2123] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] 2026-01-07T17:34:53.808979Z node 1 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-07T17:35:03.296880Z node 2 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> 
TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:451:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:454:2057] recipient: [2:453:2379] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:456:2057] recipient: [2:453:2379] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:455:2380] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:571:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:451:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:454:2057] recipient: [3:453:2379] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:456:2057] recipient: [3:453:2379] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:455:2380] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:571:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:452:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:455:2057] recipient: [4:454:2379] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:457:2057] recipient: [4:454:2379] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:456:2380] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:572:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] 2026-01-07T17:35:06.289383Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:256: {KV323@keyvalue_storage_read_request.cpp:256} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2026-01-07T17:35:06.292316Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1006: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2026-01-07T17:35:06.292378Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1925: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TBackupCollectionTests::BackupWithIndexesDefault [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertToSameKey >> TBackupCollectionTests::EmptyIncrementalBackupRace [GOOD] >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.943109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:11.042373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:11.051950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:11.052309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:11.052404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.433040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.552789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.552951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.599885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.689651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.440136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.441206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.441270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.441309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.441733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.505451Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.322493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:16.586627Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.597071Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-07T17:34:16.600855Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:16.642137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:16.642259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.696749Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:34:16.701297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.902593Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:16.902719Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.904498Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.905320Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.906164Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.907187Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.907366Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.907578Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.907852Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.908043Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.908196Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.927420Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.212579Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:17.273441Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:17.273565Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:17.312378Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:17.314152Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:17.314433Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:17.314509Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:17.314566Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:17.314639Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:17.314701Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:17.314761Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:17.315870Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:17.334188Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.334325Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2206:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.359923Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2250:2622] 2026-01-07T17:34:17.360845Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2250:2622], schemeshard id = 72075186224037897 2026-01-07T17:34:17.437472Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2309:2652] 2026-01-07T17:34:17.444301Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2026-01-07T17:34:17.503165Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Describe result: PathErrorUnknown 2026-01-07T17:34:17.503245Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Creating table 2026-01-07T17:34:17.503350Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Shared1/.metadata/_statistics 2026-01-07T17:34:17.509737Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2393:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:17.514227Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.525244Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:17.525436Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:17.555153Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:17.801380Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared1/.metadata/script_executions 2026-01-07T17:34:17.815932Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:18.577490Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:18.725801Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:18.725908Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2332:2665] Owner: [3:2331:2664]. Column diff is empty, finishing 2026-01-07T17:34:19.580289Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 20 ... node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:7304:3788], StatRequests.size() = 1 2026-01-07T17:34:55.806458Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:7349:3799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:55.806924Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:34:55.806971Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:7349:3799], StatRequests.size() = 1 2026-01-07T17:34:56.839520Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:57.043765Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:34:57.043854Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:34:57.043898Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:34:57.043941Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:34:57.191181Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:7400:3815]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:57.191700Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:34:57.191755Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:7400:3815], StatRequests.size() = 1 2026-01-07T17:34:58.589939Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:7452:3828]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:34:58.590440Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:34:58.590482Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id 
= 24, ReplyToActorId = [3:7452:3828], StatRequests.size() = 1 2026-01-07T17:35:00.033197Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:35:00.033526Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2026-01-07T17:35:00.033641Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:35:00.034307Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:35:00.070121Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:00.070194Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:00.070447Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:35:00.085776Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:00.339083Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:7496:3841]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:00.339367Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:35:00.339400Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:7496:3841], StatRequests.size() = 1 2026-01-07T17:35:01.865805Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:7546:3856]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:01.866264Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:35:01.866310Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:7546:3856], StatRequests.size() = 1 2026-01-07T17:35:02.731353Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-07T17:35:02.731416Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-07T17:35:02.731670Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:35:02.745569Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:35:03.594168Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:7591:3867]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:03.594551Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:35:03.594596Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:7591:3867], StatRequests.size() = 1 2026-01-07T17:35:04.830368Z node 3 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:35:04.830432Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.856000s, at schemeshard: 72075186224037899 2026-01-07T17:35:04.830692Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:35:04.845480Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:04.923238Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:05.162541Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:7642:3881]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:05.163090Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:35:05.163145Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:7642:3881], StatRequests.size() = 1 2026-01-07T17:35:06.581868Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:7693:3898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:06.582319Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:35:06.582371Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:7693:3898], StatRequests.size() = 1 2026-01-07T17:35:07.167500Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038900 2026-01-07T17:35:07.167572Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.358000s, at schemeshard: 72075186224038900 2026-01-07T17:35:07.167796Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038900, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:35:07.193903Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:35:07.963986Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:35:07.964284Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2026-01-07T17:35:07.964427Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:35:07.964999Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:35:07.999487Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:07.999541Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:07.999813Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 
1, are all stats full: 1 2026-01-07T17:35:08.013616Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:08.163363Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:7742:3911]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:08.163637Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:35:08.163669Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:7742:3911], StatRequests.size() = 1 2026-01-07T17:35:08.164113Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7744:3825]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:08.167444Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:35:08.167989Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:35:08.168050Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:7758:3830] 2026-01-07T17:35:08.168096Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:7758:3830] 2026-01-07T17:35:08.168831Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:7762:3831] 2026-01-07T17:35:08.169145Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:7762:3831], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:35:08.169188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:35:08.169342Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:7758:3830], server id = [2:7762:3831], tablet id = 72075186224038895, status = OK 2026-01-07T17:35:08.169518Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:35:08.169583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:7744:3825], StatRequests.size() = 1 2026-01-07T17:35:08.169675Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut >> 
TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:94:2057] recipient: [13:93:2122] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:96:2057] recipient: [13:93:2122] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:95:2123] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:211:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:92:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:95:2057] recipient: [15:94:2122] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:97:2057] recipient: [15:94:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:96:2123] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:212:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: 2026-01-07T17:35:08.564395Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008835s Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:09.272231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:09.272366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:09.272419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:09.272462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:09.272503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:09.272536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:09.287202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:09.287337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:09.288345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:09.294846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:09.646223Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:09.646289Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:09.695244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:09.695690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:09.705711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:09.746236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:09.760173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:09.805118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:09.835405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:09.862877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:09.882091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:09.940388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:09.940494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:09.943918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:09.944013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:09.944081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:09.944297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:10.210254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211512Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.211995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:10.212459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: EScheme ... 
ordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:35:11.024222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-07T17:35:11.024371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-07T17:35:11.024733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:11.024886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:11.025000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2026-01-07T17:35:11.025133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-07T17:35:11.025309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:11.025374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:11.027389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:11.027451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:11.027611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:35:11.027720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:11.027755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:35:11.027827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 FAKE_COORDINATOR: Erasing txId 100 2026-01-07T17:35:11.028243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:35:11.028303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:35:11.028413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:35:11.028451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:35:11.028499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:35:11.028527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:35:11.028563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:35:11.028622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:35:11.028662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:35:11.028690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:35:11.028767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:35:11.028803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-07T17:35:11.028835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:35:11.028879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:35:11.029516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:35:11.029607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:35:11.029639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:35:11.029735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:35:11.029786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:11.030591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:35:11.030662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:35:11.030687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:35:11.030713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:35:11.030764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:11.030833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-07T17:35:11.033108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:35:11.034208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-07T17:35:11.056712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:35:11.056790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-07T17:35:11.057309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:35:11.057435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:35:11.057475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:718:2707] TestWaitNotification: OK eventTxId 100 2026-01-07T17:35:11.057993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:11.058207Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 229us result status StatusSuccess 2026-01-07T17:35:11.058580Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } RtmrVolumeDescription { Name: "rtmr1" PathId: 38 PartitionsCount: 0 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] |85.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:06.420779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:06.420873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:35:06.420919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:06.420979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:06.421037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:06.421070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:06.421141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:06.421211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:06.422090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:06.422386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:06.495079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:06.495147Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:06.508237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:06.508568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:06.508741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:06.515395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:06.515670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:06.516345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:06.516568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:06.519312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:06.519490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:06.520606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:06.520672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:06.520802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-07T17:35:06.520858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:06.520916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:06.521136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:06.640074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.641924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:06.642513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... onId 103:0, ProgressState, NeedSyncHive: 0 2026-01-07T17:35:14.620148Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-07T17:35:14.621197Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:14.621301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:14.621342Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:35:14.621384Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2026-01-07T17:35:14.621435Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:35:14.621524Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:35:14.624023Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 3 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-07T17:35:14.624126Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:35:14.624238Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 3, ActorId:[8:501:2449], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at 
schemeshard: 72057594046678944 2026-01-07T17:35:14.624347Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-07T17:35:14.624385Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:35:14.624544Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:35:14.624582Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:550:2487], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-07T17:35:14.626500Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 0 2026-01-07T17:35:14.626667Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:35:14.626729Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:35:14.626886Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:35:14.626943Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:14.626993Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:35:14.627040Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:14.627093Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:35:14.627145Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:14.627194Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:35:14.627239Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:35:14.627338Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:35:14.628020Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:35:14.628107Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:35:14.629966Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:35:14.630040Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:35:14.630672Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:35:14.630803Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:35:14.630858Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:594:2527] TestWaitNotification: OK eventTxId 103 2026-01-07T17:35:14.631466Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:14.631657Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 235us result status StatusSuccess 2026-01-07T17:35:14.632013Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 5 ShardsInside: 3 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 5 MaxChildrenInDir: 3 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 7 MaxShardsInPath: 3 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:14.632775Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-07T17:35:14.632991Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 291us result 
status StatusSuccess 2026-01-07T17:35:14.633474Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10 ShardsInside: 3 ShardsLimit: 10 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10 MaxChildrenInDir: 10 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 10 MaxShardsInPath: 10 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |85.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |86.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |86.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |86.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameToLongKey |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |86.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestReadRequestInFlightLimit >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults |86.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:84:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2119] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:91:2057] recipient: [9:88:2119] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2120] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2119] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:91:2057] recipient: [10:88:2119] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2120] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2121] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:93:2057] recipient: [11:90:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2122] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2121] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:93:2057] recipient: [12:90:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2122] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:90:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:92:2123] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:95:2057] recipient: [13:92:2123] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:94:2124] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:210:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:90:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:93:2057] recipient: [14:92:2123] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:95:2057] recipient: [14:92:2123] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:94:2124] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:210:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2123] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:96:2057] recipient: [15:93:2123] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2124] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:93:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:96:2057] recipient: [16:95:2125] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:98:2057] recipient: [16:95:2125] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:97:2126] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:213:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:93:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:96:2057] recipient: [17:95:2125] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:98:2057] recipient: [17:95:2125] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:97:2126] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:213:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:94:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:97:2057] recipient: [18:96:2125] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:99:2057] recipient: [18:96:2125] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:98:2126] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:214:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:90:2057] recipient: [8:89:2119] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:92:2057] recipient: [8:89:2119] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:91:2120] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:207:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:91:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:94:2057] recipient: [9:93:2123] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:96:2057] recipient: [9:93:2123] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:95:2124] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:211:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:94:2057] recipient: [10:93:2123] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:96:2057] recipient: [10:93:2123] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:95:2124] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:211:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:96:2057] recipient: [11:95:2125] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:98:2057] recipient: [11:95:2125] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:97:2126] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:213:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:96:2057] recipient: [12:95:2125] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:98:2057] recipient: [12:95:2125] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:97:2126] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:213:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:96:2057] recipient: [13:95:2125] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:98:2057] recipient: [13:95:2125] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:97:2126] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:213:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:98:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:101:2057] recipient: [14:100:2129] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:103:2057] recipient: [14:100:2129] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:102:2130] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:218:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:102:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:105:2057] recipient: [15:104:2133] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:107:2057] recipient: [15:104:2133] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:106:2134] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:222:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:102:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:105:2057] recipient: [16:104:2133] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:107:2057] recipient: [16:104:2133] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:106:2134] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:104:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:107:2057] recipient: [17:106:2135] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:109:2057] recipient: [17:106:2135] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:108:2136] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:224:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:104:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:107:2057] recipient: [18:106:2135] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:109:2057] recipient: [18:106:2135] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:108:2136] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> ColumnStatistics::EqWidthHistogram >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> BasicStatistics::DedicatedTimeIntervals >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> AsyncIndexChangeCollector::AllColumnsInPk >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> TBackupCollectionTests::MultipleTablesWithIndexes |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> CdcStreamChangeCollector::DeleteNothing >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> BasicStatistics::TwoDatabases >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> 
TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore [GOOD] >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> HttpRequest::ProbeServerless >> TKeyValueTest::TestCopyRangeWorksNewApi >> TBackupCollectionTests::BackupRestoreCoveringIndex >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] |86.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:04.764294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:04.764376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:04.764409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:04.764440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:04.764469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:04.764492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:04.764529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:04.764583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:04.765401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:04.765692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:04.833937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:04.834035Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:04.846421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:04.846747Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:04.846980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:04.851968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:04.852243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:04.852780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:04.853018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:04.855081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:04.855244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:04.856138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:04.856180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:04.856265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:04.856305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:04.856340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:04.856470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:04.969457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.970994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.971044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:04.971094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6 2026-01-07T17:35:21.777870Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1171:3075], at schemeshard: 72075186233409546, txId: 116, path id: 1 2026-01-07T17:35:21.777935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1171:3075], at schemeshard: 72075186233409546, txId: 116, path id: 39 2026-01-07T17:35:21.778458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-07T17:35:21.778539Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2026-01-07T17:35:21.778881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-07T17:35:21.779863Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-07T17:35:21.779980Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-07T17:35:21.780035Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-07T17:35:21.780087Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 19 2026-01-07T17:35:21.780152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 14 2026-01-07T17:35:21.780955Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-07T17:35:21.781028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-07T17:35:21.781058Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-07T17:35:21.781086Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 39], version: 1 2026-01-07T17:35:21.781117Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 39] was 4 2026-01-07T17:35:21.781186Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2026-01-07T17:35:21.783689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2026-01-07T17:35:21.783913Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2026-01-07T17:35:21.783970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-07T17:35:21.784538Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-07T17:35:21.784767Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2026-01-07T17:35:21.784919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-07T17:35:21.784992Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-07T17:35:21.785136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-07T17:35:21.785197Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2026-01-07T17:35:21.785295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-07T17:35:21.785409Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 116:0 2 -> 3 2026-01-07T17:35:21.787857Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 
116 2026-01-07T17:35:21.788007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2026-01-07T17:35:21.789633Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-07T17:35:21.789880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-07T17:35:21.789950Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2026-01-07T17:35:21.790051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2026-01-07T17:35:21.790510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1116 RawX2: 30064774103 } TxBody: "\n\342\004\n\007Table11\020\'\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\':\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2026-01-07T17:35:21.797820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2026-01-07T17:35:21.798004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 
72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2026-01-07T17:35:21.829520Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2026-01-07T17:35:21.837842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2026-01-07T17:35:21.838205Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TBackupCollectionTests::MultipleTablesWithIndexes [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:203:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:86:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:89:2057] recipient: [16:88:2118] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:91:2057] recipient: [16:88:2118] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:90:2119] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:206:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:86:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:89:2057] recipient: [17:88:2118] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:91:2057] recipient: [17:88:2118] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:90:2119] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:206:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:87:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:90:2057] recipient: [18:89:2118] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:92:2057] recipient: [18:89:2118] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:91:2119] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:207:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:90:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:93:2057] recipient: [19:92:2121] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:95:2057] recipient: [19:92:2121] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:94:2122] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:210:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:90:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:93:2057] recipient: [20:92:2121] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:95:2057] recipient: [20:92:2121] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! 
new actor is[20:94:2122] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:210:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:91:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:94:2057] recipient: [21:93:2121] Leader for TabletID 72057594037927937 is [21:95:2122] sender: [21:96:2057] recipient: [21:93:2121] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.0%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |86.0%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:137:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:136:2159] sender: [1:138:2058] recipient: [1:114:2145] 2026-01-07T17:33:35.057866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:35.057939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:35.057973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:35.058020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:35.058061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:35.058091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:35.058174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:35.058233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:35.058992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:35.059293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:35.167365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:33:35.167470Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:35.168404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:186:2058] recipient: [1:15:2062] 2026-01-07T17:33:35.183444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:35.183662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:35.183823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:35.190772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:35.191001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:35.191742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:35.192165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:35.197517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2026-01-07T17:33:35.197704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:35.198787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:35.198845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:35.199049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:35.199116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:35.199186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:35.199310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:35.341369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342848Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.342939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:35.343554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 7T17:35:20.778285Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:20.778314Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2026-01-07T17:35:20.778348Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2026-01-07T17:35:20.778591Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:35:20.778639Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:35:20.778715Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:35:20.778748Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:35:20.778780Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-07T17:35:20.779676Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.779747Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.779778Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-07T17:35:20.779813Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:35:20.779845Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:35:20.780919Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.781024Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.781055Z node 62 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-07T17:35:20.781089Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:35:20.781124Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:35:20.781197Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-01-07T17:35:20.784610Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:35:20.784673Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:35:20.784726Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 39], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:35:20.784829Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:35:20.784864Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:35:20.784905Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:35:20.784964Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:35:20.785003Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-07T17:35:20.785046Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:35:20.785085Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-07T17:35:20.785122Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-07T17:35:20.785254Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:35:20.785291Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:35:20.786421Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:20.786472Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:35:20.786546Z 
node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:35:20.786987Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.787498Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-07T17:35:20.790064Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-07T17:35:20.790307Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-07T17:35:20.790349Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-07T17:35:20.790718Z node 62 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-07T17:35:20.790794Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-07T17:35:20.790832Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:875:2847] TestWaitNotification: OK eventTxId 1003 2026-01-07T17:35:20.791253Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:20.791488Z node 62 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 264us result status StatusSuccess 2026-01-07T17:35:20.792031Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 
0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TS3WrapperTests::GetObject >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> TS3WrapperTests::GetObject [GOOD] |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |86.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2026-01-07T17:35:26.192629Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 42167D85-4A7A-482E-8FD0-C53337FDC965, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:26699 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 96473833-5F4B-4721-B81D-FE61799527FA amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-07T17:35:26.199106Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 42167D85-4A7A-482E-8FD0-C53337FDC965, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-07T17:35:26.199691Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# DB2D0B98-8286-41B1-9120-3AD2AE277FAF, request# GetObject { 
Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:26699 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8CB6D0EC-4ED1-4E2E-AA59-F4AFD265BE5F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-07T17:35:26.202569Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# DB2D0B98-8286-41B1-9120-3AD2AE277FAF, response# GetObjectResult { } >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |86.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TBackupCollectionTests::IncrementalBackupWithIndexes [GOOD] >> TBackupCollectionTests::OmitIndexesFlag |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.1%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> MoveTable::WithCommitInProgress+Reboot >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> TColumnShardTestSchema::DropWriteRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:14.965654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:14.965789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:14.965857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:14.965901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:14.965941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:14.965974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:14.966054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:14.966149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:14.967049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:14.967354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:15.059646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:15.059711Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:15.084629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:15.085090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:15.085295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:15.095876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:15.096248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:15.097093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:34:15.097402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:15.100767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:15.100995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:15.102198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:15.102262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:15.102379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:15.102431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:15.102526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:15.102731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:15.263553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.264623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.264766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.264855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.264962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:15.265737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:84: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:35:27.612018Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:35:27.614156Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-07T17:35:27.614741Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:27.614809Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710693:0 from tablet: 72057594046678944 to tablet: 72075186233409554 cookie: 72057594046678944:9 msg type: 269549568 2026-01-07T17:35:27.614988Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710693, partId: 0, tablet: 72075186233409554 2026-01-07T17:35:27.617214Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-07T17:35:27.617288Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:27.617335Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710694:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:12 msg type: 269549568 2026-01-07T17:35:27.617443Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710694, partId: 0, tablet: 72075186233409555 TestWaitNotification: OK eventTxId 108 2026-01-07T17:35:27.618665Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2589:4205], Recipient [25:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:35:27.618783Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:35:27.618980Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:27.619375Z node 25 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CoverTable" took 418us result status StatusSuccess 2026-01-07T17:35:27.619845Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable" PathDescription { Self { Name: "CoverTable" PathId: 65 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 1 PathState: EPathStateIncomingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "CoverTable" Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "age" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "ega" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "id" KeyColumnIds: 1 TableIndexes { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 65 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:27.621000Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2590:4206], Recipient [25:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-07T17:35:27.621092Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:35:27.621252Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:35:27.621645Z node 25 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CoverTable/idx_name_age" took 395us result status StatusSuccess 2026-01-07T17:35:27.622536Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable/idx_name_age" PathDescription { Self { Name: "idx_name_age" PathId: 66 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 65 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 67 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 66 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } FollowerGroups { } } } } } PathId: 66 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 15090, MsgBus: 29115 2026-01-07T17:30:32.358962Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673390239441277:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:32.359010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:30:32.568702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:32.809171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:30:32.832336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:30:32.832469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:30:33.011614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:30:33.018581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:30:33.025095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673390239441102:2081] 1767807032343807 != 1767807032343810 2026-01-07T17:30:33.293851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:30:33.363365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:30:33.363388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:30:33.363394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:30:33.363471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:30:33.377126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:30:34.263963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:30:34.275539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2026-01-07T17:30:37.358888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673390239441277:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:30:37.364591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:30:37.807592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:38.006398Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2026-01-07T17:30:38.057679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673416009245876:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.057757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.057927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673416009245885:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.061919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:30:38.064094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673416009245890:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.064189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:30:38.076665Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673416009245891:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:30:38.201799Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673416009245943:2605] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2026-01-07T17:30:38.694462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:38.757364Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2026-01-07T17:30:39.563894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:39.645895Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2026-01-07T17:30:40.376116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:40.475565Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2026-01-07T17:30:41.228826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:41.333562Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); 
--!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2026-01-07T17:30:41.956004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:30:42.064637Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2026-01-07T17:30:42.795802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB firs ... us: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:16.196979Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:16.212305Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7592674609117649743:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:35:16.289424Z node 9 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [9:7592674609117649796:2599] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:16.535841Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [9:7592674609117649839:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-07T17:35:16.536265Z node 9 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=9&id=MjIzMGM1NC05NzYzYzhlNi03OTI0YjhlMS0yYjA3Y2M1OQ==, ActorId: [9:7592674609117649832:2351], ActorState: ExecuteState, LegacyTraceId: 01kecrdwgnc3mb1fsvfvym6amt, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-07T17:35:16.551285Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 19812, MsgBus: 24957 2026-01-07T17:35:18.575527Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592674617147141519:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:18.575672Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:18.603549Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:18.722777Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:18.744255Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:18.744391Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:18.802212Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:18.806529Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:18.872443Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:18.872470Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:18.872480Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:18.872603Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:19.110639Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:19.583784Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:19.707383Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:23.576120Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7592674617147141519:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:23.577017Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:24.760179Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674642916946145:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.760320Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.762963Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674642916946154:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.763075Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.803723Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:24.901224Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674642916946251:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.901379Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.901831Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674642916946256:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.901902Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592674642916946257:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.902394Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:24.909772Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:24.929925Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7592674642916946260:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:35:25.009182Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7592674647211913607:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:25.762074Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7592674647211913673:2367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-07T17:35:25.762929Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=OWQ5NGM1NmUtZTY4NDQ4M2QtZDg3MTEwM2ItOWQyNmI3YzE=, ActorId: [10:7592674647211913664:2362], ActorState: ExecuteState, LegacyTraceId: 01kecre5gm435n1s4n44trcqxw, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-07T17:35:25.770865Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:78:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:81:2057] recipient: [13:80:2112] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:83:2057] recipient: [13:80:2112] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:82:2113] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:198:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:78:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:81:2057] recipient: [14:80:2112] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:83:2057] recipient: [14:80:2112] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:82:2113] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:198:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:79:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:82:2057] recipient: [15:81:2112] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:84:2057] recipient: [15:81:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:83:2113] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:199:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:82:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:85:2057] recipient: [16:84:2115] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:87:2057] recipient: [16:84:2115] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:86:2116] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:202:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:85:2057] recipient: [17:84:2115] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:87:2057] recipient: [17:84:2115] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:86:2116] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:202:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:83:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:86:2057] recipient: [18:85:2115] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:88:2057] recipient: [18:85:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:87:2116] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:88:2057] recipient: [19:87:2117] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:90:2057] recipient: [19:87:2117] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:89:2118] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:205:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:88:2057] recipient: [20:87:2117] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:90:2057] recipient: [20:87:2117] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! 
new actor is[20:89:2118] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:205:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2117] Leader for TabletID 72057594037927937 is [21:90:2118] sender: [21:91:2057] recipient: [21:88:2117] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] >> TBackupCollectionTests::OmitIndexesFlag [GOOD] >> TBackupCollectionTests::IndexCdcStreamCountRotation >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> MoveTable::WithCommitInProgress+Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:35:29.768366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:35:29.804370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:35:29.804636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:35:29.812465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:35:29.812723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:35:29.812970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:35:29.813113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:35:29.813215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:35:29.813321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:35:29.813431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:35:29.813541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:35:29.813646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:35:29.813779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.813882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:35:29.813994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:35:29.814105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:35:29.844195Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:35:29.844619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:35:29.844685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:35:29.844907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:29.845105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:35:29.845200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:35:29.845248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:35:29.845356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:35:29.845423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:35:29.845466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:35:29.845499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:35:29.845682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:29.845746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:35:29.845804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:35:29.845842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:35:29.845927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:35:29.845981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:35:29.846030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:35:29.846060Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:35:29.846125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:35:29.846171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:35:29.846207Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:35:29.846264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:35:29.846312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:35:29.846363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:35:29.846576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:35:29.846636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:35:29.846731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:35:29.846856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:35:29.846958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.846990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.847043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:35:29.847084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:35:29.847117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:35:29.847161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:35:29.847196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:35:29.847224Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:35:29.847395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:35:29.847463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:35:31.365661Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137026082727776;op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137232244010624;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:35:31.365769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137026082727776;op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137232244010624;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:35:31.365819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137026082727776;op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767807330785;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137232244010624;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-07T17:35:31.366143Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:35:31.366260Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807330785 at tablet 9437184, mediator 0 2026-01-07T17:35:31.366325Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-07T17:35:31.366609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:35:31.366648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:35:31.366715Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:35:31.366790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-07T17:35:31.366856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-07T17:35:31.367160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:35:31.367747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-07T17:35:31.386042Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:35:31.387762Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137026082730688;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767807330788;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:35:31.400862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767807330788;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137026082730688;op_tx=120:TX_KIND_SCHEMA;min=1767807330788;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:35:31.400962Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767807330788;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137026082730688;op_tx=120:TX_KIND_SCHEMA;min=1767807330788;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 
4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:35:31.401950Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137026082732480;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767807330789;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:35:31.415395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767807330789;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137026082732480;op_tx=121:TX_KIND_SCHEMA;min=1767807330789;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:35:31.415473Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767807330789;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137026082732480;op_tx=121:TX_KIND_SCHEMA;min=1767807330789;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" 
InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:35:31.417024Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137026082734272;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767807330791;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:35:31.430100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767807330791;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137026082734272;op_tx=122:TX_KIND_SCHEMA;min=1767807330791;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:35:31.430197Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767807330791;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137026082734272;op_tx=122:TX_KIND_SCHEMA;min=1767807330791;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:12.925080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:13.030911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:13.050117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:13.050577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:13.050656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:13.314655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:13.314790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:13.372705Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807310849236 != 1767807310849240 2026-01-07T17:35:13.385239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:13.428765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:13.512344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:13.795273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:13.808694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:13.912684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:13.954493Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:35:13.954751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:13.999397Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:35:13.999663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:14.008475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:14.008648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:14.010365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:14.010480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:14.010548Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:14.010945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:14.011181Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:14.011276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:35:14.011655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:14.011745Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:14.013053Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:35:14.013128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:35:14.013176Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:35:14.013448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:14.013539Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:14.013601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:35:14.024394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:14.060427Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:14.060644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:14.060806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:35:14.060850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:14.060881Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:14.060914Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.061269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:14.061307Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:35:14.061363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:14.061414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:35:14.061433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:35:14.061475Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:35:14.061515Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-07T17:35:14.061936Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:14.062035Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:14.062179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:14.062216Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.062252Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:14.062287Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:14.062333Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:35:14.062404Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:35:14.062546Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:35:14.062593Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.062617Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:35:14.062645Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:14.063045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-07T17:35:14.063207Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:14.063476Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:14.063586Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:14.064020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-07T17:35:14.064238Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:35:14.064400Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:35:14.064459Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-07T17:35:14.066222Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:14.066307Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:14.077191Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:14.077311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:14.077920Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:35:14.078013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:14.195528Z node 1 :TX_DATASHARD ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:35:29.989796Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:29.991027Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:29.991064Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:29.991101Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:29.991153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:29.991198Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:29.991261Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:29.991804Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:29.993871Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:29.994148Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:29.994248Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:30.007061Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:30.007234Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:35:30.007282Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-07T17:35:30.007316Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-07T17:35:30.007835Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:30.022176Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:30.223669Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:30.223756Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.224140Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:30.224206Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:30.224266Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:30.224491Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-07T17:35:30.224633Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:30.224977Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:30.225924Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:30.288891Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-07T17:35:30.289033Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:30.289078Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:30.289135Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.289216Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:30.289281Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-07T17:35:30.289383Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.291764Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-07T17:35:30.291873Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:30.302746Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.302866Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.302941Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.303906Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.304132Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.309475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:30.316781Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:30.499329Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:30.503125Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:35:30.542741Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:30.626510Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-07T17:35:30.626930Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-07T17:35:30.627252Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807330627132 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:30.627463Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-07T17:35:30.639347Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:30.639454Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.732011Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-07T17:35:30.732339Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807330732230 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:30.732484Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-07T17:35:30.745838Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:30.745925Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.747878Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1193:2976], serverId# [4:1194:2977], sessionId# [0:0:0] 2026-01-07T17:35:30.758643Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2978], serverId# [4:1196:2979], sessionId# [0:0:0] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] 
Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:13.111330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:13.186586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:13.203071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:13.203592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:13.203663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:13.425234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:13.425361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:13.478779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807310751837 != 1767807310751841 2026-01-07T17:35:13.486895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:13.529101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:13.633260Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:13.912504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:13.925402Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:14.028189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:14.067880Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:35:14.068070Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:14.105663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:14.105837Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:14.107522Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:14.107611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:14.107665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:14.108060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:14.108196Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:14.108279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:35:14.119031Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:14.148946Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:14.149182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:14.149311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:35:14.149349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:14.149382Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:14.149420Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.149947Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:14.150064Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:14.150131Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:14.150161Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.150190Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:14.150230Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:14.150300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:35:14.150665Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:14.150886Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:14.150976Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:14.152281Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:14.162871Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:14.162982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:14.279937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:35:14.284194Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:14.284277Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.284752Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:14.284803Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:14.284855Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:14.285236Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:35:14.285390Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:14.285637Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:14.285729Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:14.287664Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:14.288149Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.290600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:35:14.290668Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.291874Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:35:14.291964Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:14.293185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:14.293239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:14.293286Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:14.293428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:14.293505Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:14.293599Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.298246Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:14.299521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:14.299633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:14.299677Z node 1 :TX_DATASHARD DEBUG: datashar ... Heartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:29.746617Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:29.748679Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:29.748865Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:29.748946Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:29.763078Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:29.763243Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:35:29.763304Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-07T17:35:29.763342Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-07T17:35:29.763902Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:29.778783Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:29.957210Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:29.957300Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:29.957736Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:29.957805Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:29.957861Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 
72075186224037888 2026-01-07T17:35:29.958101Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-07T17:35:29.958249Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:29.958663Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:29.959773Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:30.013668Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-07T17:35:30.013811Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:30.013861Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:30.013927Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.014024Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:30.014097Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-07T17:35:30.014240Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.017633Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-07T17:35:30.017767Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:30.027012Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.027141Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.027241Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.028361Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.028625Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:30.034507Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:30.043742Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:30.238720Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:30.242972Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:35:30.276892Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:30.455859Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-07T17:35:30.456355Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-07T17:35:30.456660Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807330456546 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:30.456875Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-07T17:35:30.468244Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:30.468351Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.554946Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-07T17:35:30.555259Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807330555164 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:30.555412Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-07T17:35:30.569828Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:30.569905Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.757965Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037888 2026-01-07T17:35:30.758304Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767807330758181 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:30.758483Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: 
Executed write operation for [0:5] at 72075186224037888, row count=1 2026-01-07T17:35:30.769592Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:30.769689Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:30.771835Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1211:2987], serverId# [4:1212:2988], sessionId# [0:0:0] 2026-01-07T17:35:30.783138Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1213:2989], serverId# [4:1214:2990], sessionId# [0:0:0] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2026-01-07T17:35:29.728808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:35:29.762410Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:35:29.762677Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:35:29.770316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:35:29.770578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:35:29.770810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:35:29.770951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:35:29.771063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:35:29.771170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:35:29.771265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:35:29.771384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:35:29.771543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:35:29.771643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.771742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:35:29.771857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:35:29.771981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:35:29.812558Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:35:29.813550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:35:29.813633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:35:29.813835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:29.814029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:35:29.814115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:35:29.814163Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:35:29.814281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:35:29.814390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:35:29.814444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:35:29.814476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:35:29.814652Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:29.814717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:35:29.814768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:35:29.814809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:35:29.814922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:35:29.814986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:35:29.815055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:35:29.815089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:35:29.815144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:35:29.815199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:35:29.815237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:35:29.815283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:35:29.815329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:35:29.815357Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:35:29.815570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:35:29.815681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:35:29.815716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:35:29.815857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:35:29.815918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.815949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:35:29.816020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:35:29.816068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:35:29.816107Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:35:29.816157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:35:29.816195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:35:29.816226Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:35:29.816356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:35:29.816407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ype,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:35:32.054837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:35:32.055002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.055143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.055292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.055468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:35:32.055604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.055764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.056044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:537:2501] finished for tablet 9437184 2026-01-07T17:35:32.056494Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:530:2495];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":2841827,"name":"_full_task","f":2841827,"d_finished":0,"c":0,"l":2858121,"d":16294},"events":[{"name":"bootstrap","f":2842240,"d_finished":2632,"c":1,"l":2844872,"d":2632},{"a":2857451,"name":"ack","f":2855946,"d_finished":1379,"c":1,"l":2857325,"d":2049},{"a":2857439,"name":"processing","f":2845171,"d_finished":5050,"c":3,"l":2857328,"d":5732},{"name":"ProduceResults","f":2844357,"d_finished":2496,"c":6,"l":2857794,"d":2496},{"a":2857799,"name":"Finish","f":2857799,"d_finished":0,"c":0,"l":2858121,"d":322},{"name":"task_result","f":2845190,"d_finished":3608,"c":2,"l":2855790,"d":3608}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.056568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:35:32.057000Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:530:2495];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":2841827,"name":"_full_task","f":2841827,"d_finished":0,"c":0,"l":2858608,"d":16781},"events":[{"name":"bootstrap","f":2842240,"d_finished":2632,"c":1,"l":2844872,"d":2632},{"a":2857451,"name":"ack","f":2855946,"d_finished":1379,"c":1,"l":2857325,"d":2536},{"a":2857439,"name":"processing","f":2845171,"d_finished":5050,"c":3,"l":2857328,"d":6219},{"name":"ProduceResults","f":2844357,"d_finished":2496,"c":6,"l":2857794,"d":2496},{"a":2857799,"name":"Finish","f":2857799,"d_finished":0,"c":0,"l":2858608,"d":809},{"name":"task_result","f":2845190,"d_finished":3608,"c":2,"l":2855790,"d":3608}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:32.057082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:35:31.961857Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:35:32.057120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:35:32.057261Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-07T17:35:32.058011Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-07T17:35:32.058294Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2026-01-07T17:35:32.058429Z 
node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-07T17:35:32.058474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:35:32.058546Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:14.491546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:14.575549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:14.590566Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:14.590905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:14.590949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:14.811380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:14.811503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:14.884087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807312276749 != 1767807312276753 2026-01-07T17:35:14.894808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:14.940680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:15.027183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:15.323527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:15.338290Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:15.441958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:15.478470Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:35:15.478715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:15.515520Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:35:15.515707Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:15.523152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:15.523309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:15.524854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:15.524974Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:15.525030Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:15.525417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:15.525663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:15.525751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:35:15.526163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:15.526250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:15.527194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:35:15.527261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:35:15.527305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:35:15.527536Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:15.527615Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:15.527699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:35:15.538495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:15.565029Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:15.565238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:15.565372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:35:15.565430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:15.565468Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:15.565499Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:15.565806Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:15.565834Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:35:15.565897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:15.565935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:35:15.565950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:35:15.565973Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:35:15.565991Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-07T17:35:15.566317Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:15.566384Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:15.566501Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:15.566529Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:15.566554Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:15.566585Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:15.566608Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:35:15.566653Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:35:15.566743Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:35:15.566768Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:15.566783Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:35:15.566811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:15.567116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-07T17:35:15.567250Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:15.567476Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:15.567573Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:15.567928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-07T17:35:15.568103Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:35:15.568243Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:35:15.568287Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-07T17:35:15.569882Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:15.569946Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:15.580626Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:15.580714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:15.581160Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:35:15.581220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:15.699595Z node 1 :TX_DATASHARD ... ransaction::Execute at 72075186224037888 2026-01-07T17:35:31.115535Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:31.115582Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:31.115775Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:35:31.115877Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:31.115976Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:35:31.116034Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:31.116553Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:31.121199Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:31.124064Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:31.124143Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-01-07T17:35:31.124527Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:31.124894Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:31.128846Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-01-07T17:35:31.128919Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:35:31.130200Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:35:31.130244Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2026-01-07T17:35:31.130381Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-01-07T17:35:31.130473Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:31.131117Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:35:31.131171Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:31.132193Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:31.132262Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:35:31.132316Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T17:35:31.132397Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:31.132454Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:31.132543Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:35:31.143073Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.143229Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.143625Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:31.143670Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:31.143714Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:31.143788Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:31.143838Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:31.143911Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:31.157045Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:31.157282Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-07T17:35:31.157360Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 
2026-01-07T17:35:31.158262Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:31.158769Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:31.158836Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:31.171362Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.171522Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.171674Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1009:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.172873Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1014:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.173192Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.179109Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:31.186234Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.186375Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.383492Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.383795Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.386977Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:31.427284Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:31.527510Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2925], serverId# [4:1140:2926], sessionId# [0:0:0] 2026-01-07T17:35:31.528025Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-07T17:35:31.528357Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807331528233 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:35:31.528539Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-07T17:35:31.540092Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:35:31.540233Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:35:31.552144Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1146:2931], serverId# [4:1147:2932], sessionId# [0:0:0] 2026-01-07T17:35:31.560763Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1148:2933], serverId# [4:1149:2934], sessionId# [0:0:0] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2026-01-07T17:35:31.493282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:35:31.523409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:35:31.523748Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:35:31.532504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:35:31.532804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:35:31.533062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:35:31.533186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:35:31.533286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:35:31.533405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:35:31.533544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:35:31.533923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:35:31.534090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:35:31.534228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:35:31.534349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:35:31.534479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:35:31.534592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:35:31.570050Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:35:31.570506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:35:31.570582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:35:31.570790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:31.570985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:35:31.571057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:35:31.571103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:35:31.571240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:35:31.571321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:35:31.571368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:35:31.571419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:35:31.571622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:31.571696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:35:31.571740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:35:31.571771Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:35:31.571881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:35:31.571936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:35:31.572009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:35:31.572044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:35:31.572098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:35:31.572142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:35:31.572176Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:35:31.572226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:35:31.572265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:35:31.572302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:35:31.572624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:35:31.572688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:35:31.572736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:35:31.572873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:35:31.572918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:35:31.574978Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:35:31.575085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:35:31.575137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:35:31.575193Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:35:31.575246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:35:31.575288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:35:31.575339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-07T17:35:31.575547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:35:31.575604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n;tablet_id=9437184;tx_id=101;this=136427097217120;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:35:32.229927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;this=136427097217120;op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_this=136633258369472;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:35:32.230025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;this=136427097217120;op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_this=136633258369472;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:104:2137]; 2026-01-07T17:35:32.230093Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;this=136427097217120;op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767807332451;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_this=136633258369472;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2026-01-07T17:35:32.230423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:35:32.230562Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807332451 at tablet 9437184, mediator 0 2026-01-07T17:35:32.230642Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-07T17:35:32.230957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:35:32.231058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:35:32.231122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:35:32.231247Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl 
settings: { Version: 1 } at tablet 9437184 2026-01-07T17:35:32.247077Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767807332451;tx_id=101;;switch_optimizer=0;switch_accessors=0; 2026-01-07T17:35:32.247219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:35:32.247394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-07T17:35:32.247482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-07T17:35:32.247774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:35:32.266144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-07T17:35:32.305648Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2026-01-07T17:35:32.310476Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-07T17:35:32.310579Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6120;operation_id=443de7d2-ebef11f0-abbf6fe7-dbcbb4e1;in_flight=1;size_in_flight=6120; 2026-01-07T17:35:32.339298Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:129:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8392;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-07T17:35:32.349953Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6120;event=data_write_finished;writing_id=443de7d2-ebef11f0-abbf6fe7-dbcbb4e1; 2026-01-07T17:35:32.350325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=78;data_size=59;sum=78;count=1; 2026-01-07T17:35:32.350391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=174;data_size=171;sum=174;count=2;size_of_meta=112; 2026-01-07T17:35:32.350450Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=254;data_size=251;sum=254;count=1;size_of_portion=192; 2026-01-07T17:35:32.351244Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:35:32.351402Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:35:32.367195Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:35:32.367443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:35:32.407146Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;this=136427097568800;op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_this=136633258203072;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:35:32.407300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;this=136427097568800;op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_this=136633258203072;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:104:2137]; 2026-01-07T17:35:32.407379Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;this=136427097568800;op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767807332458;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:2;;int_this=136633258203072;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2026-01-07T17:35:32.407840Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:35:32.408009Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807332458 at tablet 9437184, mediator 0 2026-01-07T17:35:32.408093Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-07T17:35:32.408439Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-07T17:35:32.427607Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-07T17:35:32.428149Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807332459 at tablet 9437184, mediator 0 2026-01-07T17:35:32.428260Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] execute at tablet 9437184 2026-01-07T17:35:32.428653Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:88;progress_tx_id=102;lock_id=1;broken=0; 2026-01-07T17:35:32.449685Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] complete at tablet 9437184 2026-01-07T17:35:32.451499Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=102;lock_id=1;broken=0; 2026-01-07T17:35:32.451825Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:35:32.451890Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:13.495334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:13.577638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:13.591920Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:13.592312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:13.592368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:13.801292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:13.801402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:13.867063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807311431480 != 1767807311431484 2026-01-07T17:35:13.882027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:13.925903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:14.015993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:14.293023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:14.307051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:14.409669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:14.453262Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:35:14.453461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:14.486464Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:35:14.486658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:14.492759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:14.492868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:14.494398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:14.494526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:14.494580Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:14.494906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:14.495122Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:14.495192Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:35:14.495438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:14.495495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:14.496495Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:35:14.496535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:35:14.496570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:35:14.496746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:14.496811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:14.496856Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:35:14.507552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:14.537371Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:14.537545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:14.537629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:35:14.537657Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:14.537704Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:14.537729Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:14.537960Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:14.537985Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:35:14.538019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:14.538062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:35:14.538079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:35:14.538105Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:35:14.538119Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-07T17:35:14.538448Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:14.538531Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:14.538629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:14.538658Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.538686Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:14.538711Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:14.538748Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:35:14.538789Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:35:14.538832Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:35:14.538860Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:14.538875Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:35:14.538891Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:14.539159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-07T17:35:14.539247Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:14.539426Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:14.539509Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:14.539804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-07T17:35:14.539924Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:35:14.540038Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-07T17:35:14.540075Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-07T17:35:14.541287Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:14.541340Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:14.552083Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:14.552200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:14.552673Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:35:14.552736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:14.670125Z node 1 :TX_DATASHARD ... tashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:31.056133Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:31.056199Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:31.056262Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:31.056341Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:31.056406Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:31.056532Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:31.056672Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2026-01-07T17:35:31.056708Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:35:31.058569Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:35:31.058684Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:35:31.058736Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T17:35:31.058808Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:31.058867Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:31.058944Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:35:31.060186Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2000} 2026-01-07T17:35:31.060266Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:35:31.062881Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 
2026-01-07T17:35:31.062941Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:35:31.062992Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-07T17:35:31.063074Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:31.063130Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:31.063201Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:35:31.064484Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.064594Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.064668Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:35:31.070734Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:31.071236Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:31.071332Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:31.071549Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:31.072415Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:31.072842Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-07T17:35:31.072898Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:35:31.073708Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-07T17:35:31.073768Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-07T17:35:31.099437Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.099607Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.099720Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.109320Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.109644Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:31.123513Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:31.142902Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.143035Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.143086Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:35:31.274658Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:31.274791Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:35:31.274851Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:35:31.283099Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:31.414639Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:31.598622Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1223:2975], serverId# [4:1224:2976], sessionId# [0:0:0] 2026-01-07T17:35:31.599121Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-07T17:35:31.599437Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807331599331 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:35:31.599646Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767807331599331 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-07T17:35:31.599778Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-07T17:35:31.613819Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:35:31.613929Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:35:31.619815Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1230:2981], serverId# [4:1231:2982], sessionId# [0:0:0] 2026-01-07T17:35:31.628771Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1232:2983], serverId# [4:1233:2984], sessionId# [0:0:0] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> MoveTable::WithCommitInProgress-Reboot |86.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |86.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpSystemView::PartitionStatsRanges >> KqpSystemView::NodesSimple >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges >> KqpSysColV1::InnerJoinSelect >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TResourcePoolTest::AlterResourcePool >> TResourcePoolTest::ReadOnlyMode >> TResourcePoolTest::DropResourcePool >> MoveTable::WithCommitInProgress-Reboot [GOOD] >> Cdc::KeysOnlyLog[PqRunner] |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |86.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> Cdc::KeysOnlyLog[YdsRunner] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:89:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:88:2117] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:108:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:109:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:78:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:81:2057] recipient: [15:80:2112] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:83:2057] recipient: [15:80:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:82:2113] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:198:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:79:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:84:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:83:2113] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:199:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:85:2057] recipient: [18:84:2115] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:87:2057] recipient: [18:84:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:86:2116] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:202:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:83:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:86:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:88:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:87:2116] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:203:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2118] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:91:2057] recipient: [21:88:2118] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2119] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:206:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:87:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:90:2057] recipient: [23:89:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:89:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! 
new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:207:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress-Reboot [GOOD] Test command err: 2026-01-07T17:35:34.858923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:35:34.892572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:35:34.892848Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:35:34.901842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:35:34.902077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:35:34.902310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:35:34.902457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:35:34.902561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:35:34.902652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:35:34.902747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:35:34.902852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:35:34.903009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:35:34.903130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:35:34.903237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:35:34.903356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:35:34.903476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:35:34.944437Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:35:34.945746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:35:34.945830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:35:34.946064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:34.946261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:35:34.946376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:35:34.946449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:35:34.946574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:35:34.946770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:35:34.946839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:35:34.946885Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:35:34.947101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:35:34.947335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:35:34.947389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:35:34.947458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:35:34.947593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:35:34.947673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:35:34.947757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:35:34.947813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:35:34.947868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:35:34.947918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:35:34.947958Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:35:34.948007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:35:34.948055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:35:34.948089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:35:34.948327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:35:34.948459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:35:34.948509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:35:34.948662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:35:34.948725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:35:34.948765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:35:34.948836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:35:34.948886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:35:34.948956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:35:34.949030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:35:34.949074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:35:34.949125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:35:34.949326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:35:34.949397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ingested_at,saved_at,request_id; 2026-01-07T17:35:36.207473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:35:36.207742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.207944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.208173Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.208454Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:35:36.208679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.209071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.209512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:345:2357] finished for tablet 9437184 2026-01-07T17:35:36.210098Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:339:2351];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":2017875,"name":"_full_task","f":2017875,"d_finished":0,"c":0,"l":2037054,"d":19179},"events":[{"name":"bootstrap","f":2018365,"d_finished":4105,"c":1,"l":2022470,"d":4105},{"a":2035896,"name":"ack","f":2033916,"d_finished":1770,"c":1,"l":2035686,"d":2928},{"a":2035879,"name":"processing","f":2023041,"d_finished":5851,"c":3,"l":2035689,"d":7026},{"name":"ProduceResults","f":2021380,"d_finished":3368,"c":6,"l":2036578,"d":3368},{"a":2036585,"name":"Finish","f":2036585,"d_finished":0,"c":0,"l":2037054,"d":469},{"name":"task_result","f":2023082,"d_finished":3997,"c":2,"l":2033780,"d":3997}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.210199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:35:36.210857Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:339:2351];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":2017875,"name":"_full_task","f":2017875,"d_finished":0,"c":0,"l":2037730,"d":19855},"events":[{"name":"bootstrap","f":2018365,"d_finished":4105,"c":1,"l":2022470,"d":4105},{"a":2035896,"name":"ack","f":2033916,"d_finished":1770,"c":1,"l":2035686,"d":3604},{"a":2035879,"name":"processing","f":2023041,"d_finished":5851,"c":3,"l":2035689,"d":7702},{"name":"ProduceResults","f":2021380,"d_finished":3368,"c":6,"l":2036578,"d":3368},{"a":2036585,"name":"Finish","f":2036585,"d_finished":0,"c":0,"l":2037730,"d":1145},{"name":"task_result","f":2023082,"d_finished":3997,"c":2,"l":2033780,"d":3997}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:35:36.210963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:35:36.116300Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:35:36.211010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:35:36.211213Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-07T17:35:36.212170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-07T17:35:36.212653Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 
2026-01-07T17:35:36.212820Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-07T17:35:36.212917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:35:36.213060Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TResourcePoolTest::DropResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:85:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:88:2057] recipient: [22:87:2117] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:90:2057] recipient: [22:87:2117] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:89:2118] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:205:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:88:2057] recipient: [23:87:2117] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:90:2057] recipient: [23:87:2117] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! 
new actor is[23:89:2118] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:205:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TResourcePoolTest::AlterResourcePool [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists >> BasicStatistics::NotFullStatisticsDatashard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:36.583607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:36.583707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:36.583781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:36.583827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:36.583868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:36.583895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:36.583955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:35:36.584019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:36.584764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:36.585077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:36.669617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:36.669670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.688243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:36.688547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:36.688681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:36.695725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:36.695980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:36.696654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:36.696971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:36.700741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.700967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:36.702163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:36.702228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.702368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:36.702423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:36.702468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:36.702675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:36.867125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.869948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.870044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.870139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.870235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
h_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:37.601765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:35:37.601905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:35:37.601986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:35:37.602160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:37.602208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:35:37.602247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-07T17:35:37.602285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 41 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:35:37.602788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:35:37.602841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:35:37.602945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:35:37.602987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:37.603023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:35:37.603051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:37.603087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:35:37.603124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:35:37.603157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:35:37.603187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:35:37.603287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:35:37.603362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-07T17:35:37.603399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-07T17:35:37.603427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-07T17:35:37.603446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-07T17:35:37.603914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.604071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.604107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:35:37.604156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-07T17:35:37.604218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:35:37.604650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:37.604693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:35:37.604761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:35:37.612205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.612322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.612367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:35:37.612418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-07T17:35:37.612474Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:35:37.613239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.613322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:35:37.613354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:35:37.613382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-07T17:35:37.613419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:35:37.613500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:35:37.619643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:35:37.619841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:35:37.620468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:35:37.621000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:35:37.621234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:35:37.621284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:35:37.621738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:35:37.621843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:35:37.621878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:782:2771] TestWaitNotification: OK eventTxId 103 2026-01-07T17:35:37.622466Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:37.622675Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 256us result status StatusPathDoesNotExist 2026-01-07T17:35:37.622876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:137:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:136:2159] sender: [1:138:2058] recipient: [1:114:2145] 2026-01-07T17:33:33.556662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:33.556740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:33.556785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:33.556829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:33.556869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:33.556901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:33.557016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:33.557112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:33.558048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:33.558349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:33.680578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:33:33.680645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:33.681585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:186:2058] recipient: [1:15:2062] 2026-01-07T17:33:33.693743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:33.694027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:33:33.694211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:33.699585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:33.699775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:33.700361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:33.700611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:33.702477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:33.702627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:33.703576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:33.703621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:33.703762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:33.703812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:33.703858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:33.703961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:33.819054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.819853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.819941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.819982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:33.820520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
ESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2026-01-07T17:35:33.249477Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2026-01-07T17:35:33.249514Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:35:33.249554Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:35:33.249634Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1002 2026-01-07T17:35:33.257546Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1246 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-07T17:35:33.257613Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-07T17:35:33.257749Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1246 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-07T17:35:33.257856Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1246 } } CommitVersion { Step: 5000039 TxId: 1002 } debug: NTableState::TProposedWaitParts operationId# 1002:0 2026-01-07T17:35:33.259054Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 747 RawX2: 309237648044 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-07T17:35:33.259098Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-07T17:35:33.259216Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 747 RawX2: 309237648044 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 
2026-01-07T17:35:33.259269Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:35:33.259348Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 747 RawX2: 309237648044 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-07T17:35:33.259404Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:33.259434Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-07T17:35:33.259469Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:35:33.259507Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1002:0 129 -> 240 2026-01-07T17:35:33.261907Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-07T17:35:33.262403Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-07T17:35:33.264395Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-07T17:35:33.264600Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-07T17:35:33.265006Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-07T17:35:33.265063Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2026-01-07T17:35:33.265163Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2026-01-07T17:35:33.265194Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-07T17:35:33.265235Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2026-01-07T17:35:33.265268Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-07T17:35:33.265313Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2026-01-07T17:35:33.265358Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-07T17:35:33.265390Z node 72 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1002:0 2026-01-07T17:35:33.265430Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1002:0 2026-01-07T17:35:33.265537Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2026-01-07T17:35:33.269032Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2026-01-07T17:35:33.269094Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2026-01-07T17:35:33.269406Z node 72 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2026-01-07T17:35:33.269493Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2026-01-07T17:35:33.269523Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:824:2795] TestWaitNotification: OK eventTxId 1002 2026-01-07T17:35:33.269863Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:33.270045Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 213us result status StatusSuccess 2026-01-07T17:35:33.270574Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TBackupCollectionTests::IndexCdcStreamCountRotation [GOOD] >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] >> Cdc::UuidExchange[PqRunner] >> KqpScanArrowInChanels::JoinWithParams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:36.693780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:36.693862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:36.693925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:36.693970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:36.694001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:36.694035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:36.694101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:36.694165Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:36.695036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:36.695370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:36.781847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:36.781911Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.801243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:36.801648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:36.801821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:36.825973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:36.826461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:36.827187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:36.827461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:36.835868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.836178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:36.837831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:36.837926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.838114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:36.838178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:36.838235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:36.838492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:36.981170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.982956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.983028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.983124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.983217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2026-01-07T17:35:38.337683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-07T17:35:38.337811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:35:38.338676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.338787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.338825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:35:38.338870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-07T17:35:38.338917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:35:38.340260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.340345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.340375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:35:38.340407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 2 2026-01-07T17:35:38.340438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:35:38.340516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-07T17:35:38.342660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-07T17:35:38.342864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:35:38.344988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-07T17:35:38.346376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:38.346513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:38.346583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-07T17:35:38.346773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-07T17:35:38.347015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:35:38.347100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:35:38.347597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-01-07T17:35:38.349535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:38.349577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:38.349781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:35:38.349907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:38.349943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2852], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-07T17:35:38.349985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2852], at schemeshard: 72057594046678944, txId: 129, path id: 43 2026-01-07T17:35:38.350386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-07T17:35:38.350437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-07T17:35:38.350559Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:35:38.350594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:35:38.350635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:35:38.350690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:35:38.350787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-07T17:35:38.350837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:35:38.350872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-07T17:35:38.350905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-07T17:35:38.351013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:35:38.351063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-07T17:35:38.351097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2026-01-07T17:35:38.351128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 43], 3 2026-01-07T17:35:38.351777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.351870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.351915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:35:38.351954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-07T17:35:38.352007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:35:38.353175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.353269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:35:38.353304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:35:38.353334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-07T17:35:38.353403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:35:38.353479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-07T17:35:38.357566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:35:38.357730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.986573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:11.090600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:11.097621Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:11.097904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:11.098003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.569153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.690142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.690298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.729130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.809555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.533260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.534560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.534615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.534649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.534895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.610230Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.444426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:16.751869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.758874Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:16.762661Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:16.816228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:16.816366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.863581Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:16.870175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.124516Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:17.124625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:17.126308Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.127042Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.127696Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.128750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.128891Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.129289Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.129441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.129585Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.129770Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:17.150668Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:17.403327Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:17.448945Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:17.449048Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:17.495082Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:17.496648Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:17.496898Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:17.496998Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:17.497060Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:17.497123Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:17.497191Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:17.497247Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:17.497961Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:17.500373Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:34:17.503040Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:34:17.521985Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:34:17.522052Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:34:17.522148Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:34:17.530887Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.531001Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.567638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:34:17.568174Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:34:17.572451Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:17.621926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.644329Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:17.644487Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:17.665860Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:17.867493Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:17.938983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:34:18.454519Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:18.601728Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:18.601837Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:34:19.455442Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 20Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4115:3545], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:34:48.152380Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:34:48.152618Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:34:48.152709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4098:3540], StatRequests.size() = 1 2026-01-07T17:34:48.152834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... unblocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR 2026-01-07T17:34:48.353208Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:34:48.353336Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:34:48.401511Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4108:3544], schemeshard count = 1 ... 
waiting for stats update from SchemeShard 2026-01-07T17:34:50.823587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:52.417995Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:34:52.418201Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-07T17:34:52.418418Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:34:52.418649Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:34:52.441703Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:34:52.441786Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:34:52.442058Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:34:52.455750Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-07T17:34:54.770533Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:34:56.895249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:34:56.895650Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2026-01-07T17:34:56.895947Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... waiting for TEvPropagateStatistics (done) 2026-01-07T17:34:56.896153Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-07T17:34:56.903013Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:104: [72075186224037894] EvServerDisconnected, pipe server id = [2:2209:2650] 2026-01-07T17:34:57.100141Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:57.256413Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:57.256522Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:4451:3670], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:57.262322Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4457:3675] 2026-01-07T17:34:57.263880Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4457:3675], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... 
waiting for stats update from SchemeShard 2026-01-07T17:35:00.467516Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:02.682154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:02.682562Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 9 2026-01-07T17:35:02.682862Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:02.682970Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-07T17:35:05.828313Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:07.820189Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:07.820630Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 10 2026-01-07T17:35:07.820985Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:07.821130Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-07T17:35:10.874632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:12.855787Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:12.856266Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 11 2026-01-07T17:35:12.856664Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:12.856824Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-07T17:35:12.878626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:35:12.878698Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:15.969880Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:16.171723Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:35:16.171793Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:35:16.171842Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:35:16.171886Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:35:19.171722Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:19.172106Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 12 2026-01-07T17:35:19.172339Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 
2026-01-07T17:35:19.172415Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-07T17:35:23.413793Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:25.610840Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:25.611017Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 2026-01-07T17:35:25.611138Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-07T17:35:25.611327Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:35:28.721729Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:31.069455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:31.069828Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 14 2026-01-07T17:35:31.070088Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:31.070201Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-07T17:35:31.125613Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:35:31.125717Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:31.126004Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:35:31.139413Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-07T17:35:34.590299Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:37.041267Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:35:37.041488Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 15 ... 
waiting for TEvPropagateStatistics (done) 2026-01-07T17:35:37.041892Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5483:4034]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:37.042104Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 15 2026-01-07T17:35:37.042256Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:35:37.042323Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5483:4034], StatRequests.size() = 1 2026-01-07T17:35:37.042516Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:36.506057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:36.506175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:36.506223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:36.506265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:36.506323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:36.506356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:36.506413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:36.506488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:36.507482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:36.507820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:36.611254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:36.611321Z node 
1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.628887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:36.629396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:36.629586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:36.636794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:36.637184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:36.638058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:36.638375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:36.641573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.641778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:36.643068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:36.643135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:36.643273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:36.643327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:36.643376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:36.643582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:36.823731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.824817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.825834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.825988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:36.826892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
is done id#101:0 progress is 3/3 2026-01-07T17:35:39.305302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:35:39.305334Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-07T17:35:39.305357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:35:39.305382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-07T17:35:39.305422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-07T17:35:39.305507Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:35:39.305547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:35:39.305629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:35:39.305667Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-07T17:35:39.305689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-07T17:35:39.305734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:35:39.305767Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-07T17:35:39.305789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-07T17:35:39.305820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:35:39.305854Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-07T17:35:39.305890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:35:39.305921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-07T17:35:39.305945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 5 2026-01-07T17:35:39.305966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-07T17:35:39.307354Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.307459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.307503Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:39.307545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:35:39.307588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:39.308844Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.308950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.308986Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:39.309039Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-07T17:35:39.309073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:39.309994Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.310071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.310100Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:39.310129Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-07T17:35:39.310177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:35:39.311459Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.311542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:39.311572Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:39.311617Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:35:39.311654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:35:39.311721Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:35:39.314729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:39.315272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:39.315505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:39.316408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:35:39.316653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:35:39.316708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:35:39.317118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:39.317230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:35:39.317278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:730:2719] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:35:39.320686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpAlterResourcePool CreateResourcePool { Name: "MyResourcePool" Properties { Properties { key: "concurrent_query_limit" value: "20" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:39.320995Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_resource_pool.cpp:123: [72057594046678944] TAlterResourcePool Propose: opId# 102:0, path# /MyRoot/.metadata/workload_manager/pools/MyResourcePool 2026-01-07T17:35:39.321219Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), at schemeshard: 72057594046678944 2026-01-07T17:35:39.323670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:39.323941Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), operation: ALTER RESOURCE POOL, path: MyResourcePool TestModificationResult got TxId: 102, wait until txId: 102 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> KqpStreamLookup::ReadTableDuringSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 9978, MsgBus: 13872 2026-01-07T17:33:40.406684Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674197223060596:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:40.407071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:40.650531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:40.650621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:40.690418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:40.703013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:40.720491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:40.740381Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7592674197223060475:2081] 1767807220402478 != 1767807220402481 2026-01-07T17:33:40.803377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:40.803405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:40.803412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:40.803499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:40.906338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:41.238612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:41.249400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:41.318398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.411646Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:41.445903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.599689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.665287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.241591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210107964231:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.241709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.242014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210107964241:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.242045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.573110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.606940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.641028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.713504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.762429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.807516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.870684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.925858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:44.017176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674214402932410:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:44.017262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:44.017672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674214402932415:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:44.017716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674214402932416:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:44.017794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:44.021223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:44.033982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674214402932419:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:33:44.129394Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674214402932470:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:45.405698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674197223060596:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:45.441036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:33:45.620826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:27.368271Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:27.539774Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:27.557411Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7592674655480043536:2081] 1767807327312979 != 1767807327312982 2026-01-07T17:35:27.557608Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:27.557745Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:27.558645Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:27.564779Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:27.708486Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:27.708523Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:27.708537Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:27.708706Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:28.118543Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:28.326575Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:28.801223Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:28.820846Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:28.922165Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:29.186467Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:29.317424Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:32.321075Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592674655480043649:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:32.321194Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:33.416841Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674681249849202:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:33.417018Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:33.417472Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674681249849211:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:33.417560Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:33.527298Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.580804Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.631031Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.683008Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.737717Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.792558Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.860563Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.947160Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:34.094704Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674685544817380:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:34.094810Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:34.094880Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674685544817385:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:34.095200Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674685544817387:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:34.095244Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:34.102537Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:34.129323Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674685544817388:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:35:34.224685Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674685544817448:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:37.681728Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807337695, txId: 281474976710673] shutting down 2026-01-07T17:35:38.087031Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807338115, txId: 281474976710675] shutting down |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:14.887271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:14.972414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:14.985675Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:14.986098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:14.986141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:15.252130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:15.252264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:15.334503Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807312732261 != 1767807312732265 2026-01-07T17:35:15.346825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:15.395529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:15.479430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:15.772118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:15.786255Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:15.890901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:15.922090Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:35:15.922322Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:15.957476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:15.957673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:15.959501Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:15.959603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:15.959661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:15.960077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:15.960237Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:15.960334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:35:15.971142Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:15.993846Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:15.994046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:15.994145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:35:15.994179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:15.994207Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:15.994238Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:15.994693Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:15.994812Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:15.994889Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:15.994930Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:15.994970Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:15.995009Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:15.995091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:35:15.995554Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:15.995821Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:15.995937Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:15.997691Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:16.008532Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:16.008650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:16.126917Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:35:16.130913Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:16.130984Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:16.131329Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:16.131364Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:16.131400Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:16.131646Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:35:16.131747Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:16.131886Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:16.131941Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:16.133455Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:16.133800Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:16.136061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:35:16.136115Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:16.137124Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:35:16.137203Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:16.138499Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:16.138547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:16.138601Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:16.138666Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:16.138721Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:16.138859Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:16.151965Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:16.153747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:16.153916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:35:16.153992Z node 1 :TX_DATASHARD DEBUG: datashar ... 38], version# 2, step# 2500, txId# 281474976715658, at tablet# 72075186224037888 2026-01-07T17:35:41.111812Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:41.196179Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-07T17:35:41.196307Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:41.196355Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:41.196401Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.196505Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:41.196575Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-07T17:35:41.196904Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.199396Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-07T17:35:41.199495Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:41.242147Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.242319Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.242405Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.243520Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.243734Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.254904Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:41.271858Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:41.487398Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:41.502179Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:35:41.532146Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:41.640184Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-07T17:35:41.640594Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-07T17:35:41.640832Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767807341640725 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:41.641095Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-07T17:35:41.653792Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-07T17:35:41.653895Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.892184Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:35:41.895200Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:41.901108Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2026-01-07T17:35:41.901238Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2026-01-07T17:35:41.901297Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2026-01-07T17:35:41.913545Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:42.043562Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715662 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:42.043642Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:42.043995Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:42.044064Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:42.044117Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715662] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:42.044436Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 3500:281474976715662 keys extracted: 0 2026-01-07T17:35:42.044601Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:42.044848Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:42.044964Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2026-01-07T17:35:42.045986Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 3, step# 3500, txId# 281474976715662, at tablet# 72075186224037888 2026-01-07T17:35:42.046146Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 3500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:35:42.046621Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:42.056805Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2026-01-07T17:35:42.056915Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:42.058542Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:42.058645Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-07T17:35:42.058761Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:42.058829Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-01-07T17:35:42.058992Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-07T17:35:42.059058Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:42.066684Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 
281474976715662 datashard 72075186224037888 state Ready 2026-01-07T17:35:42.066796Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:35:42.104483Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1209:2995], serverId# [4:1210:2996], sessionId# [0:0:0] 2026-01-07T17:35:42.125463Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1212:2998], serverId# [4:1213:2999], sessionId# [0:0:0] |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |86.2%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> KqpSysColV1::InnerJoinSelect [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> KqpSystemView::PartitionStatsRanges [GOOD] |86.2%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> THiveTest::TestNoMigrationToSelf >> HttpRequest::ProbeBaseStatsServerless [GOOD] |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |86.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 7333, MsgBus: 62326 2026-01-07T17:35:35.425120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674689813547565:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:35.425252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:35.756552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:35.768448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:35.768556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:35.844623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:35.958186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.016760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:36.142990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:36.143021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:36.143052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:36.143201Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:36.436421Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:36.840741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:36.849626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:35:36.896682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.032673Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.216034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.301274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:39.815108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674706993418579:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.815267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.815802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674706993418588:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.815907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.304794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.361216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.400449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.426854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674689813547565:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:40.426928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:40.505990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.565124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.659050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.733171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.829361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.966413Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674711288386756:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.966518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.966900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674711288386761:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.966939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674711288386762:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.967228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.972203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:40.996022Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674711288386765:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:35:41.086606Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674715583354112:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:44.068338Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807344057, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 18839, MsgBus: 5780 2026-01-07T17:35:35.966324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674693707266957:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:35.966856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:36.291788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:36.306266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:36.306414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:36.307976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:36.317952Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674693707266925:2081] 1767807335962324 != 1767807335962327 2026-01-07T17:35:36.327804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.488944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:36.488967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:36.488976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:36.489047Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:36.506598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:36.973225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:37.080425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:37.087519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:35:37.149263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.343189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.568478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.663788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:39.874456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674710887137987:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.874695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.875344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674710887137997:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:39.875421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.333279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.383136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.424237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.478098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.531112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.579072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.726071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.806872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.953803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674715182106171:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.953916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.954316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674715182106176:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.954353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674715182106177:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.954483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:40.958493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:40.967567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674693707266957:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:40.967672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:40.977797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674715182106180:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:35:41.072059Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674719477073530:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> KqpSystemView::NodesSimple [GOOD] |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] ... r refreshed! new actor is[22:86:2116] Leader for TabletID 72057594037927937 is [22:86:2116] sender: [22:202:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:82:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:84:2115] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:87:2057] recipient: [23:84:2115] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:86:2116] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:202:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:83:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:85:2115] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:88:2057] recipient: [24:85:2115] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:87:2116] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:203:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:86:2057] recipient: [25:39:2086] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:89:2057] recipient: [25:88:2118] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:91:2057] recipient: [25:88:2118] !Reboot 72057594037927937 (actor [25:58:2099]) rebooted! !Reboot 72057594037927937 (actor [25:58:2099]) tablet resolver refreshed! new actor is[25:90:2119] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:206:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:52:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:59:2057] recipient: [26:52:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:76:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:86:2057] recipient: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:89:2057] recipient: [26:88:2118] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:91:2057] recipient: [26:88:2118] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:90:2119] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:206:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:87:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:90:2057] recipient: [27:89:2118] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:92:2057] recipient: [27:89:2118] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! 
new actor is[27:91:2119] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:207:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:90:2057] recipient: [28:39:2086] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:93:2057] recipient: [28:92:2121] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:95:2057] recipient: [28:92:2121] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:94:2122] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:210:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:90:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:93:2057] recipient: [29:92:2121] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:95:2057] recipient: [29:92:2121] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:94:2122] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:210:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:91:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:94:2057] recipient: [30:93:2121] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:96:2057] recipient: [30:93:2121] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! 
new actor is[30:95:2122] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:211:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:93:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:96:2057] recipient: [31:95:2123] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:98:2057] recipient: [31:95:2123] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:97:2124] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:213:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:93:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:214:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.333698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:10.440942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:10.449628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:10.450034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:10.450174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:10.879648Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.032008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.032149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.072555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.191685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:11.947546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:11.948697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:11.948748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:11.948780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:11.949286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.016385Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:12.614994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:16.123553Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.143992Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:16.158868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:16.240054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:34:16.240183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.286802Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:16.293797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.504787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:16.504945Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.506492Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.507300Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.507848Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.508757Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.510309Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.510573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.510800Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.511031Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.511190Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:16.536213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.899095Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:16.951989Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:16.952083Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:16.989218Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:16.990634Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:16.990882Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:16.990940Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:16.991005Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:16.991065Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:16.991114Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:16.991165Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:16.992040Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:17.006867Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.006992Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:17.038918Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:34:17.039770Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:34:17.144310Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:34:17.146505Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:34:17.170192Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:34:17.170276Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:34:17.170378Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:34:17.182755Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:34:17.188055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.200394Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:34:17.200616Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:34:17.216848Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:34:17.474915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:34:17.507473Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:34:18.003803Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:34:18.215290Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:34:18.215402Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:34:19.145741Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... sult] RequestId[ 51 ] 2026-01-07T17:35:24.947259Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 51, ReplyToActorId = [2:6266:5028], StatRequests.size() = 1 2026-01-07T17:35:26.263529Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 52 ], ReplyToActorId[ [2:6298:5043]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:26.263929Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 52 ] 2026-01-07T17:35:26.263979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 52, ReplyToActorId = [2:6298:5043], StatRequests.size() = 1 2026-01-07T17:35:26.643568Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:26.643646Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:26.643903Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:35:26.663105Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:27.629856Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 53 ], ReplyToActorId[ [2:6336:5064]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:27.630287Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 53 ] 2026-01-07T17:35:27.630355Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 53, ReplyToActorId = [2:6336:5064], StatRequests.size() = 1 2026-01-07T17:35:28.766887Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:28.960323Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 54 ], ReplyToActorId[ [2:6370:5079]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:28.960687Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 54 ] 
2026-01-07T17:35:28.960736Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 54, ReplyToActorId = [2:6370:5079], StatRequests.size() = 1 2026-01-07T17:35:29.714334Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-07T17:35:29.714405Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.952000s, at schemeshard: 72075186224037899 2026-01-07T17:35:29.714694Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-07T17:35:29.734697Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:30.274475Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 55 ], ReplyToActorId[ [2:6406:5096]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:30.274904Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 55 ] 2026-01-07T17:35:30.274959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 55, ReplyToActorId = [2:6406:5096], StatRequests.size() = 1 2026-01-07T17:35:31.118522Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:35:31.118621Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:35:31.118657Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:35:31.118691Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:35:32.422989Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:6449:5116]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:32.423447Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-07T17:35:32.423504Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:6449:5116], StatRequests.size() = 1 2026-01-07T17:35:33.119563Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:35:33.119842Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-07T17:35:33.119935Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-07T17:35:34.225955Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:6487:5134]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:34.226334Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-07T17:35:34.226383Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:6487:5134], StatRequests.size() = 1 2026-01-07T17:35:35.125808Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths 
with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:35.125873Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:35.126152Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:35:35.146727Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:36.928204Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:6523:5153]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:36.928599Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-07T17:35:36.928648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:6523:5153], StatRequests.size() = 1 2026-01-07T17:35:39.008695Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:6557:5170]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:39.009056Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-07T17:35:39.009102Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:6557:5170], StatRequests.size() = 1 2026-01-07T17:35:40.793242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:40.999731Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:6601:5195]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:41.000016Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-07T17:35:41.000064Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:6601:5195], StatRequests.size() = 1 2026-01-07T17:35:41.810019Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:35:41.810086Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.696000s, at schemeshard: 72075186224037899 2026-01-07T17:35:41.810297Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 27, entries count: 1, are all stats full: 1 2026-01-07T17:35:41.838543Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:42.455229Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:6641:5216]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:42.455512Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-07T17:35:42.455558Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:6641:5216], StatRequests.size() = 1 2026-01-07T17:35:43.836648Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:6675:5231]], StatType[ 0 
], StatRequestsCount[ 1 ] 2026-01-07T17:35:43.837286Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-07T17:35:43.837338Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:6675:5231], StatRequests.size() = 1 2026-01-07T17:35:44.209471Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:35:44.209693Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-07T17:35:44.209789Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-07T17:35:44.299825Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:44.299916Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:44.300218Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:35:44.315380Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:45.179274Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:6713:5252]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:45.179585Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2026-01-07T17:35:45.179625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:6713:5252], StatRequests.size() = 1 2026-01-07T17:35:45.180442Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:6716:5255]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:45.180699Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-07T17:35:45.180760Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:6716:5255], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11296 }' |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is 
[2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:52:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:203:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
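The repeating pattern in this trace is one reboot cycle per driving event: a "!Reboot <tablet> (actor [...]) on event <EventType> !" line, followed by "rebooted!", "tablet resolver refreshed!" and the new leader actor id. A minimal sketch, assuming such a trace is saved to a hypothetical file named keyvalue_reboot_trace.log, that counts cycles per triggering event type by matching only the literal "!Reboot ... on event" lines shown above:

import re
from collections import Counter

# Matches cycle-start records such as:
#   !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest !
START = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) on event (\S+) !")

def reboot_events(path):
    """Count reboot cycles per triggering event type in a saved trace file."""
    counts = Counter()
    with open(path, encoding="utf-8") as trace:
        for line in trace:
            for _tablet_id, event in START.findall(line):
                counts[event] += 1
    return counts

if __name__ == "__main__":
    for event, n in reboot_events("keyvalue_reboot_trace.log").most_common():  # hypothetical file name
        print(f"{n:3d}  {event}")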
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:86:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:89:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:91:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:90:2119] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:206:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:87:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:90:2057] recipient: [24:89:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:89:2118] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:207:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TExternalDataSourceTest::ReadOnlyMode >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:46.577170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:46.705958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:46.728827Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:46.729433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:46.729503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:47.068377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:47.068532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:47.143125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807343502325 != 1767807343502329 2026-01-07T17:35:47.155477Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:47.200248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:47.306251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:47.627855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:47.642958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:47.758820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:48.057161Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-07T17:35:48.057310Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-07T17:35:48.221066Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.163320s, errors=0 2026-01-07T17:35:48.221156Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 21107, MsgBus: 1283 2026-01-07T17:35:35.707493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674689616703591:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:35.707548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:35.784098Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592674691790753761:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:35.795503Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:35.817192Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592674692125041697:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:35.817285Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:36.074240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:36.075207Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:36.104305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:36.176963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:36.177062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:36.181842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:36.181953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:36.183596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:36.183682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:36.230607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:36.259752Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:35:36.259837Z node 1 :HIVE WARN: hive_impl.cpp:814: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:36.262128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:36.262353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:36.264245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:36.280394Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:36.286038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:36.340761Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:36.590148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:36.590179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:36.590187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:36.590296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:36.741259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:36.805390Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:36.833150Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:37.794873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:38.002252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:38.276490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:38.988314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:39.325017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:40.709067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674689616703591:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:40.709139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:40.782873Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592674691790753761:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:40.782949Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:40.814168Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592674692125041697:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:40.814233Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:41.899359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674715386509598:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.899485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.900197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674715386509608:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:41.900270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.357449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.449749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.532993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.680343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.805474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.993813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:43.133707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:43.274785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:43.522653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674723976445305:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:43.522755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:43.523122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674723976445310:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:43.523165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674723976445311:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:43.523196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:43.527547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:43.617217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674723976445314:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725670 completed, doublechecking } 2026-01-07T17:35:43.713726Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674723976445398:4729] txid# 281474976725671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:46.287064Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807346252, txId: 281474976725673] shutting down |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::CreateExternalDataSource >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-07T17:35:02.911415Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:61:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:78:2057] recipient: [3:17:2064] 2026-01-07T17:35:03.220346Z node 4 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:61:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:78:2057] recipient: [4:17:2064] !Reboot 72057594037927937 (actor [4:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:80:2057] recipient: [4:42:2089] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:83:2057] recipient: [4:82:2114] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:85:2057] recipient: [4:82:2114] !Reboot 72057594037927937 (actor [4:60:2101]) rebooted! !Reboot 72057594037927937 (actor [4:60:2101]) tablet resolver refreshed! new actor is[4:84:2115] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:200:2057] recipient: [4:17:2064] 2026-01-07T17:35:05.114038Z node 5 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:61:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:78:2057] recipient: [5:17:2064] !Reboot 72057594037927937 (actor [5:60:2101]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:80:2057] recipient: [5:42:2089] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:83:2057] recipient: [5:82:2114] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:85:2057] recipient: [5:82:2114] !Reboot 72057594037927937 (actor [5:60:2101]) rebooted! !Reboot 72057594037927937 (actor [5:60:2101]) tablet resolver refreshed! new actor is[5:84:2115] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:200:2057] recipient: [5:17:2064] 2026-01-07T17:35:07.076017Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:61:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:78:2057] recipient: [6:17:2064] !Reboot 72057594037927937 (actor [6:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:81:2057] recipient: [6:42:2089] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:84:2057] recipient: [6:83:2114] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:86:2057] recipient: [6:83:2114] !Reboot 72057594037927937 (actor [6:60:2101]) rebooted! !Reboot 72057594037927937 (actor [6:60:2101]) tablet resolver refreshed! 
new actor is[6:85:2115] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:201:2057] recipient: [6:17:2064] 2026-01-07T17:35:08.947385Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:54:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:61:2057] recipient: [7:54:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:78:2057] recipient: [7:17:2064] !Reboot 72057594037927937 (actor [7:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:84:2057] recipient: [7:42:2089] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:60:2101]) rebooted! !Reboot 72057594037927937 (actor [7:60:2101]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:17:2064] 2026-01-07T17:35:10.843569Z node 8 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:61:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:78:2057] recipient: [8:17:2064] !Reboot 72057594037927937 (actor [8:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:84:2057] recipient: [8:42:2089] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:60:2101]) rebooted! !Reboot 72057594037927937 (actor [8:60:2101]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:17:2064] 2026-01-07T17:35:12.723490Z node 9 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:61:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:78:2057] recipient: [9:17:2064] !Reboot 72057594037927937 (actor [9:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:85:2057] recipient: [9:42:2089] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:60:2101]) rebooted! !Reboot 72057594037927937 (actor [9:60:2101]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:17:2064] 2026-01-07T17:35:14.647756Z node 10 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:61:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:78:2057] recipient: [10:17:2064] !Reboot 72057594037927937 (actor [10:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:88:2057] recipient: [10:42:2089] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:60:2101]) rebooted! !Reboot 72057594037927937 (actor [10:60:2101]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:17:2064] 2026-01-07T17:35:16.541862Z node 11 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:61:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:78:2057] recipient: [11:17:2064] !Reboot 72057594037927937 (actor [11:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:88:2057] recipient: [11:42:2089] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:60:2101]) rebooted! !Reboot 72057594037927937 (actor [11:60:2101]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:17:2064] 2026-01-07T17:35:18.621379Z node 12 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:54:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:61:2057] recipient: [12:54:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:78:2057] recipient: [12:17:2064] !Reboot 72057594037927937 (actor [12:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:89:2057] recipient: [12:42:2089] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:60:2101]) rebooted! 
!Reboot 72057594037927937 (actor [12:60:2101]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:212 ... pient: [18:42:2089] Leader for TabletID 72057594037927937 is [18:60:2101] sender: [18:100:2057] recipient: [18:99:2126] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:102:2057] recipient: [18:99:2126] !Reboot 72057594037927937 (actor [18:60:2101]) rebooted! !Reboot 72057594037927937 (actor [18:60:2101]) tablet resolver refreshed! new actor is[18:101:2127] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:217:2057] recipient: [18:17:2064] 2026-01-07T17:35:33.732799Z node 19 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:54:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:61:2057] recipient: [19:54:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:78:2057] recipient: [19:17:2064] !Reboot 72057594037927937 (actor [19:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:100:2057] recipient: [19:42:2089] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:103:2057] recipient: [19:102:2129] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:105:2057] recipient: [19:102:2129] !Reboot 72057594037927937 (actor [19:60:2101]) rebooted! !Reboot 72057594037927937 (actor [19:60:2101]) tablet resolver refreshed! new actor is[19:104:2130] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:220:2057] recipient: [19:17:2064] 2026-01-07T17:35:35.852051Z node 20 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:61:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:78:2057] recipient: [20:17:2064] !Reboot 72057594037927937 (actor [20:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:100:2057] recipient: [20:42:2089] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:103:2057] recipient: [20:102:2129] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:105:2057] recipient: [20:102:2129] !Reboot 72057594037927937 (actor [20:60:2101]) rebooted! !Reboot 72057594037927937 (actor [20:60:2101]) tablet resolver refreshed! 
new actor is[20:104:2130] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:220:2057] recipient: [20:17:2064] 2026-01-07T17:35:37.985697Z node 21 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:61:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:78:2057] recipient: [21:17:2064] !Reboot 72057594037927937 (actor [21:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:101:2057] recipient: [21:42:2089] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:104:2057] recipient: [21:103:2129] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:106:2057] recipient: [21:103:2129] !Reboot 72057594037927937 (actor [21:60:2101]) rebooted! !Reboot 72057594037927937 (actor [21:60:2101]) tablet resolver refreshed! new actor is[21:105:2130] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:221:2057] recipient: [21:17:2064] 2026-01-07T17:35:40.197861Z node 22 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:61:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:78:2057] recipient: [22:17:2064] !Reboot 72057594037927937 (actor [22:60:2101]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:102:2057] recipient: [22:42:2089] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:105:2057] recipient: [22:104:2130] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:107:2057] recipient: [22:104:2130] !Reboot 72057594037927937 (actor [22:60:2101]) rebooted! !Reboot 72057594037927937 (actor [22:60:2101]) tablet resolver refreshed! new actor is[22:106:2131] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:126:2057] recipient: [22:17:2064] 2026-01-07T17:35:40.632465Z node 23 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:61:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:78:2057] recipient: [23:17:2064] !Reboot 72057594037927937 (actor [23:60:2101]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! 
Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:103:2057] recipient: [23:42:2089] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:106:2057] recipient: [23:105:2131] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:108:2057] recipient: [23:105:2131] !Reboot 72057594037927937 (actor [23:60:2101]) rebooted! !Reboot 72057594037927937 (actor [23:60:2101]) tablet resolver refreshed! new actor is[23:107:2132] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:127:2057] recipient: [23:17:2064] 2026-01-07T17:35:41.152637Z node 24 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:54:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:61:2057] recipient: [24:54:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:78:2057] recipient: [24:17:2064] !Reboot 72057594037927937 (actor [24:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:106:2057] recipient: [24:42:2089] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:109:2057] recipient: [24:108:2134] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:111:2057] recipient: [24:108:2134] !Reboot 72057594037927937 (actor [24:60:2101]) rebooted! !Reboot 72057594037927937 (actor [24:60:2101]) tablet resolver refreshed! new actor is[24:110:2135] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:226:2057] recipient: [24:17:2064] 2026-01-07T17:35:43.495603Z node 25 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:61:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:78:2057] recipient: [25:17:2064] !Reboot 72057594037927937 (actor [25:60:2101]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:106:2057] recipient: [25:42:2089] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:109:2057] recipient: [25:108:2134] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:111:2057] recipient: [25:108:2134] !Reboot 72057594037927937 (actor [25:60:2101]) rebooted! !Reboot 72057594037927937 (actor [25:60:2101]) tablet resolver refreshed! 
new actor is[25:110:2135] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:226:2057] recipient: [25:17:2064] 2026-01-07T17:35:45.709883Z node 26 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:54:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:61:2057] recipient: [26:54:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:78:2057] recipient: [26:17:2064] !Reboot 72057594037927937 (actor [26:60:2101]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:106:2057] recipient: [26:42:2089] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:109:2057] recipient: [26:108:2134] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:111:2057] recipient: [26:108:2134] !Reboot 72057594037927937 (actor [26:60:2101]) rebooted! !Reboot 72057594037927937 (actor [26:60:2101]) tablet resolver refreshed! new actor is[26:110:2135] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:226:2057] recipient: [26:17:2064] 2026-01-07T17:35:48.050949Z node 27 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:61:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:78:2057] recipient: [27:17:2064] !Reboot 72057594037927937 (actor [27:60:2101]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:111:2057] recipient: [27:42:2089] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:114:2057] recipient: [27:113:2138] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:116:2057] recipient: [27:113:2138] !Reboot 72057594037927937 (actor [27:60:2101]) rebooted! !Reboot 72057594037927937 (actor [27:60:2101]) tablet resolver refreshed! 
new actor is[27:115:2139] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:231:2057] recipient: [27:17:2064] 2026-01-07T17:35:50.187196Z node 28 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:61:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:78:2057] recipient: [28:17:2064] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalDataSourceTest::DropTableTwice >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:10.892616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:11.007301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:11.015220Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:11.015725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:11.015788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:11.447008Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:11.592563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:11.592780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:11.631597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:11.756590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:12.495184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:12.496002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:12.496043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:12.496081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:12.496464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:12.569358Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:13.475327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:34:20.424692Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:20.424963Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:20.438233Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:34:20.438336Z node 3 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-07T17:34:20.444333Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:20.444839Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:20.531870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:20.532018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:20.532868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:20.532984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:20.581056Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:20.585634Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:34:20.589346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:20.589726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:20.841343Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.842252Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.843007Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.844185Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.844347Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.844583Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.844989Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.845186Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:20.845334Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:34:21.080653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:34:21.109468Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:34:21.133959Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:21.134119Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:21.135310Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:21.135407Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:21.151181Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:34:21.151858Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:21.154005Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:21.567733Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:21.742302Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:34:21.742437Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:34:21.926317Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:34:21.931361Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:34:21.931605Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:34:21.931667Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:34:21.931727Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:34:21.931786Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:34:21.931841Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:34:21.931906Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:34:21.932623Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:34:22.310330Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:22.310435Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2709:2648], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:34:22.481554Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2757:2683] 2026-01-07T17:34:22.483156Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2757:2683], schemeshard id = 72075186224037897 2026-01-07T17:34:22.570512Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2809:2707] 2026-01-07T17:34:22.570978Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:34:22.573434Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics 
updater. SelfId: [3:2811:2709] Owner: [3:2810:2708]. Describe result: PathErrorUnknown 2026-01-07T17:34:22.573488Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2811:2709] Owner: [3:2810:2708]. Creating table 2026-01-07T17:34:22.573584Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2811:2709] Owner: [3:2810:2708]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:34:22.577406Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2828:2714], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01 ... = 16, ReplyToActorId = [3:5907:3677], StatRequests.size() = 1 2026-01-07T17:35:31.536263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:35:31.536363Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:31.630987Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-07T17:35:31.631670Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 3 2026-01-07T17:35:31.632387Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:31.632504Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:35:31.632730Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-07T17:35:31.647032Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 17 ], ReplyToActorId[ [3:5943:3690]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:31.647348Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 17 ] 2026-01-07T17:35:31.647406Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 17, ReplyToActorId = [3:5943:3690], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-07T17:35:32.880776Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [3:5976:3702]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:32.881133Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2026-01-07T17:35:32.881184Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, ReplyToActorId = [3:5976:3702], StatRequests.size() = 1 2026-01-07T17:35:33.918030Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [3:6007:3712]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:33.918397Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-07T17:35:33.918450Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [3:6007:3712], StatRequests.size() = 1 2026-01-07T17:35:35.081495Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:35.092920Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [3:6046:3725]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-07T17:35:35.093281Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:35:35.093328Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [3:6046:3725], StatRequests.size() = 1 2026-01-07T17:35:36.360318Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [3:6082:3735]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:36.360668Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:35:36.360738Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:6082:3735], StatRequests.size() = 1 2026-01-07T17:35:37.536587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-07T17:35:37.537035Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2026-01-07T17:35:37.537399Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:37.537466Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:35:37.537558Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-07T17:35:37.548667Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:6120:3749]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:37.549001Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:35:37.549044Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:6120:3749], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-07T17:35:38.806089Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:6157:3764]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:38.806471Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:35:38.806530Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:6157:3764], StatRequests.size() = 1 2026-01-07T17:35:39.966897Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:6188:3774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:39.967233Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:35:39.967282Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:6188:3774], StatRequests.size() = 1 2026-01-07T17:35:41.291576Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:41.318106Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:6219:3784]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:41.318491Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:35:41.318549Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:6219:3784], 
StatRequests.size() = 1 2026-01-07T17:35:42.570038Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:6252:3796]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:42.570466Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:35:42.570521Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:6252:3796], StatRequests.size() = 1 2026-01-07T17:35:43.889640Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-07T17:35:43.890263Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:35:43.890652Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:35:43.890834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:35:43.890901Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:43.913327Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6289:3808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:43.913718Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:35:43.913781Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6289:3808], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-07T17:35:45.121750Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6328:3824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:45.122045Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:35:45.122080Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6328:3824], StatRequests.size() = 1 2026-01-07T17:35:46.294013Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6361:3836]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:46.294357Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:35:46.294400Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6361:3836], StatRequests.size() = 1 2026-01-07T17:35:47.453965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:35:47.466047Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6398:3847]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:47.466469Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:35:47.466944Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6398:3847], StatRequests.size() = 1 2026-01-07T17:35:48.619128Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6432:3859]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:48.619493Z node 3 :STATISTICS DEBUG: 
service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:35:48.619543Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6432:3859], StatRequests.size() = 1 2026-01-07T17:35:49.623074Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:35:49.623167Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:35:49.623475Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:35:49.637737Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:35:49.736589Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-07T17:35:49.737102Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-07T17:35:49.737507Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:35:49.737578Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:35:49.737685Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:35:49.749193Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6466:3869]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:35:49.749535Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:35:49.749581Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6466:3869], StatRequests.size() = 1 row count: 7 (expected: 7) |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:51.099821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:51.099899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.099940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:51.099986Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:51.100029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:51.100058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:51.100139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.100226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:51.101174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:51.101460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:51.239251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:51.239328Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:51.240206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:51.255960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:51.256784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:51.256995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:51.273487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:51.273802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:51.274419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:51.274688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:51.288402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.288605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:51.289773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:51.289851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.290021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:51.290064Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:51.290157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:51.290328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:51.453413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.454967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.455612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... eshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:52.092692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:34: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:35:52.092836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:35:52.093055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:52.093143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:52.094395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:52.096056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:52.097560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:52.097610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:52.097795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:35:52.097932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:35:52.098021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:52.098056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:35:52.098108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:35:52.098132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:35:52.098458Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:35:52.098505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:35:52.098666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:35:52.098712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:52.098756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:35:52.098787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:52.098844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:35:52.098911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:52.098946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:35:52.098976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:35:52.099071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:35:52.099128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:35:52.099179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:35:52.099215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:35:52.100330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:52.100433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:52.100469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:52.100529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:35:52.100580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:52.101481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:52.101578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:52.101609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:52.101649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:35:52.101704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:52.101779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:35:52.107020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:52.107227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:35:52.107431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:35:52.107473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:35:52.107924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:35:52.108024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.108065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:738:2727] TestWaitNotification: OK eventTxId 102 2026-01-07T17:35:52.108591Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:52.108877Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 278us result status StatusSuccess 2026-01-07T17:35:52.109271Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:49.774813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:49.774901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:49.774948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:49.775001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:49.775044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:49.775088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:49.775166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:49.775227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:49.776125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:49.776422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:49.921583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:49.921675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:49.922579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:49.959147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:49.959992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:49.960194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:49.972144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:49.972457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:49.973237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:49.973557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:49.981824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:49.982053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:49.983377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:49.983462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:49.983673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:49.983727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:49.983835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:49.984007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-07T17:35:50.147826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.148897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.149932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.150013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
onResult 2026-01-07T17:35:52.209269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.209319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-01-07T17:35:52.209381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.209405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.209464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-07T17:35:52.209583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 113, at schemeshard: 72057594046678944 2026-01-07T17:35:52.209627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.209647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.209742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2026-01-07T17:35:52.209812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.209840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.209960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 115, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.210196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.210402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.210638Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.210733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.210907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-07T17:35:52.210966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.210989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-07T17:35:52.211190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-07T17:35:52.211235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.211258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.211360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.211510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-07T17:35:52.211674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.211708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.211896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.211982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 
2026-01-07T17:35:52.212004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.212137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.212163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.212209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.212229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:807:2796] 2026-01-07T17:35:52.212415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.212463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:807:2796] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 2026-01-07T17:35:52.216442Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:52.216707Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 316us result status StatusSuccess 2026-01-07T17:35:52.217111Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:49.711060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:49.711153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:49.711192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:49.711224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:49.711263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:49.711310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:49.711369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:49.711420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:49.712201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:49.712488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:49.840161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:49.840251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:49.841156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:49.856592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:49.857710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:49.857895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:49.889385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:49.889811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:49.890612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:49.892888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:49.898426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:49.898638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:49.899780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:49.899870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:49.900065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:49.900125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:49.900230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:49.900395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:50.058211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.059966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.060063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.060126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.060250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:50.060330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
fCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:35:52.281446Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.281522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.281552Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:52.281583Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:35:52.281613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:35:52.282405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.282479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.282506Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:52.282534Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:35:52.282564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:52.282628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:35:52.284755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:52.285794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:52.286146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:35:52.286355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:35:52.286397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:35:52.286772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:52.286869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.286907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:747:2736] TestWaitNotification: OK eventTxId 101 2026-01-07T17:35:52.287331Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:52.287531Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 233us result status StatusSuccess 2026-01-07T17:35:52.287856Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-07T17:35:52.291056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2026-01-07T17:35:52.291214Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2026-01-07T17:35:52.291308Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2026-01-07T17:35:52.293572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:52.293816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:35:52.294096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:35:52.294140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:35:52.294516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:35:52.294603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.294638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:755:2744] TestWaitNotification: OK eventTxId 103 2026-01-07T17:35:52.295055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:52.295224Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 191us result status StatusSuccess 2026-01-07T17:35:52.295543Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:35:51.487129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:51.487237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.487291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:51.487331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:51.487367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:51.487399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:51.487485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.487559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:51.488520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:51.488850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:51.579069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:35:51.579118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:51.595240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:51.595630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:51.595837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:51.604595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:51.604995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:51.605786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:51.606063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:51.609247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.609460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:51.610626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:51.610691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.610831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:51.610886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:51.610930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:51.611141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:51.764948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.765940Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.766936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.767029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.767110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:35:52.501442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:35:52.501545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:35:52.501593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:35:52.501638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:35:52.501674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:35:52.501713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:35:52.501756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:35:52.501795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:35:52.501838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:35:52.501900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:52.501931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:35:52.501959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:35:52.501990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:35:52.502655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.502755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.502794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:52.502835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:35:52.502901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:52.503559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 
2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.503613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:52.503637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:52.503658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:35:52.503684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:52.503739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:35:52.506994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:52.507770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:35:52.507967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:35:52.508004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:35:52.508308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:52.508388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.508429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:706:2695] TestWaitNotification: OK eventTxId 101 2026-01-07T17:35:52.508802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:52.509029Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 177us result status StatusSuccess 2026-01-07T17:35:52.509354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:35:52.512528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:52.512804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-07T17:35:52.512911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:202: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-07T17:35:52.513085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2026-01-07T17:35:52.515819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:52.516138Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:35:52.516510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:35:52.516563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:35:52.517004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:35:52.517124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:35:52.517157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:714:2703] TestWaitNotification: OK eventTxId 102 >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TExternalDataSourceTest::SchemeErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:45.302554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:45.480769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:45.508206Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:45.508738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:45.508807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:45.859971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:45.860119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:46.029914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807342298513 != 1767807342298517 2026-01-07T17:35:46.041700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:46.094123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:46.219686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:46.601871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:46.617561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:46.750598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:47.125145Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2026-01-07T17:35:47.125310Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-07T17:35:47.201306Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.075496s, errors=0 2026-01-07T17:35:47.201438Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:50.855321Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:50.861813Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:50.866075Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:50.866427Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:50.866625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:51.113051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:51.113193Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:51.138853Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807347784463 != 1767807347784467 2026-01-07T17:35:51.142353Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:51.187871Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:51.306139Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:51.613861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:51.634582Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:51.751416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:51.982482Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-07T17:35:51.982620Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-07T17:35:52.041338Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.058421s, errors=0 2026-01-07T17:35:52.041443Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:34:15.688525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:34:15.688631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:15.688679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:34:15.688712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:34:15.688745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:34:15.688773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:34:15.688833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:34:15.688906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:34:15.689762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:34:15.690068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:34:15.807265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:34:15.807323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:15.834725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:34:15.835102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:34:15.835270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:34:15.847499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:34:15.847912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:34:15.848665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:34:15.848976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:34:15.852823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:15.853026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:34:15.858307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:34:15.858398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:34:15.858543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:34:15.858595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:34:15.858688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:34:15.858901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:34:16.074232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.075990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.076058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.076128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.076205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:34:16.076299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 2 2026-01-07T17:35:49.576514Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:2 2026-01-07T17:35:49.576537Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:2 2026-01-07T17:35:49.576621Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 59] was 1 2026-01-07T17:35:49.577002Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [24:130:2154], Recipient [24:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:35:49.577037Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-07T17:35:49.577090Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:49.577122Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 59], at schemeshard: 72057594046678944 2026-01-07T17:35:49.577180Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 1 2026-01-07T17:35:49.577231Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 57], at schemeshard: 72057594046678944 2026-01-07T17:35:49.577272Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 40] was 7 2026-01-07T17:35:49.579909Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:49.579986Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [24:3109:4578] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710759 at schemeshard: 72057594046678944 2026-01-07T17:35:49.580320Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435106, Sender [24:3109:4578], Recipient [24:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-07T17:35:49.580354Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5475: StateWork, processing event TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-07T17:35:49.580463Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:35:49.580805Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:35:49.582149Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:35:49.595175Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [24:3178:4644], Recipient [24:130:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:35:49.595235Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:35:49.595256Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-07T17:35:49.920158Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [24:130:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:35:49.920241Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:35:49.920530Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [24:130:2154], Recipient [24:130:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:35:49.920573Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:35:49.979039Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [24:3198:4664], Recipient [24:130:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:35:49.979146Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:35:49.979279Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true 
BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:35:49.979638Z node 24 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 343us result status StatusSuccess 2026-01-07T17:35:49.980778Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 11 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 11 IsBackup: false CdcStreams { Name: "000000000000000A_continuousBackupImpl" Mode: 
ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 46 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "09700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 48 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "19700101000019Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 61 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 50 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 56 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is 
[1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:52.433616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:52.433696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:52.433762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:52.433808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:52.433844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:52.433873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:52.433949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:52.434018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:52.434959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:52.435273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:52.563166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:52.563253Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:52.564110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:52.576072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:52.576879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:52.577074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:52.587702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:52.587987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:52.588695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:52.589016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-07T17:35:52.593206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:52.593399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:52.594480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:52.594545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:52.594721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:52.594767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:52.594864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:52.595025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:52.755540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.756668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.756812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.756883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.756981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.757893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... : 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:35:53.437141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:35:53.437503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:53.437628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:53.437679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:35:53.437979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:35:53.438042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:35:53.438248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:53.438338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:53.440614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:53.440660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:53.440861Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:53.440910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:35:53.441213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:35:53.441265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:35:53.441362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:35:53.441396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:35:53.441434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:35:53.441462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:35:53.441497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:35:53.441533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:35:53.441570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:35:53.441615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:35:53.441689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:53.441726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:35:53.441771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:35:53.442454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:35:53.442652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:35:53.442697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:35:53.442739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:35:53.442785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:53.442885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:35:53.446858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:35:53.447299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:35:53.448437Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:677:2666] Bootstrap 2026-01-07T17:35:53.449605Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:677:2666] Become StateWork (SchemeCache [1:682:2671]) 2026-01-07T17:35:53.452638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:53.452986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-07T17:35:53.453082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-07T17:35:53.453157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-07T17:35:53.454249Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:677:2666] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:35:53.457876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:53.458147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2026-01-07T17:35:53.458677Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle 
TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:35:53.458894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:35:53.458939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:35:53.459322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:53.459422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:35:53.459459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:692:2681] TestWaitNotification: OK eventTxId 101 2026-01-07T17:35:53.459917Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:53.460112Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 222us result status StatusPathDoesNotExist 2026-01-07T17:35:53.460333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:50.392425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:50.506283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:50.524709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:50.525218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:50.525285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:50.823067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:50.823215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:50.894425Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807347406093 != 1767807347406097 2026-01-07T17:35:50.909883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:50.956206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:51.045300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:51.339023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:51.353555Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:51.463366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:51.764299Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2026-01-07T17:35:51.764430Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2026-01-07T17:35:51.768250Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-07T17:35:51.768324Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-07T17:35:51.768373Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# 
{Tag: 0, parent: [1:953:2811], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-07T17:35:51.768392Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-07T17:35:51.768451Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-07T17:35:51.768502Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-07T17:35:51.774192Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=OGYzMGE1NzQtYmZlNzA2YzYtYmVkNTZhYjMtNzZiYzYyNjg= 2026-01-07T17:35:51.774268Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=N2E4YjNkOTMtMjBjMjE0M2MtY2U2MTk0OTctZjVlNjRmMDA= 2026-01-07T17:35:51.777874Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=YjM0MmFiNi1lYTNkZWRlYy1jNmQ3YWUzMi03NDI3YWU4Yg== 2026-01-07T17:35:51.777949Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=M2YxNTY4YWYtMjg4ZjdhNjEtYzQ5OGYzNS0xMjAyMjYwNg== 2026-01-07T17:35:51.779700Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=MzY5YmM4MGQtZmM0YTE5MmUtZWY0Mzg2ODYtZWRlZTJkNmM= 2026-01-07T17:35:51.791776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.792411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.794223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.794556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:51.800354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:51.868308Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:35:51.870136Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1013:2865] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:35:51.870765Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1017:2869] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:35:51.871415Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:35:51.992004Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:51.992119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:51.992202Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:51.992258Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:51.992310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:52.027783Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:52.523136Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767807352.523088s, errors=0 2026-01-07T17:35:52.523479Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767807352523 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:35:52.537130Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1167:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:52.601861Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1767807352.601820s, errors=0 2026-01-07T17:35:52.602170Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767807352601 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:35:52.616071Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1214:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:52.681557Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1767807352.681511s, errors=0 2026-01-07T17:35:52.681718Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767807352681 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:35:52.696020Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1261:3009] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:52.762607Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1767807352.762570s, errors=0 2026-01-07T17:35:52.762892Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767807352762 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:35:52.776910Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1308:3031] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:52.847451Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 3} finished in 1767807352.847405s, errors=0 2026-01-07T17:35:52.847753Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767807352847 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:35:52.847822Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1.079738s, oks# 20, errors# 0 2026-01-07T17:35:52.847961Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:51.234864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:51.234976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.235036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:51.235082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:51.235122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:51.235183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:51.235284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:51.235358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:51.236291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:51.236606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:51.359579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:51.359657Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:51.360351Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:51.370055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:51.370736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:51.370880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:51.383717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:51.384108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:51.384899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:51.385310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:51.393434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.393658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:51.394863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:51.394944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:51.395144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:51.395206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:51.395312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:51.395486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:51.535198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.540887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:51.541697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:53.414909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:35:53.414954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:53.415826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:53.415913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:35:53.415947Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:35:53.415975Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:35:53.416005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:53.416074Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:35:53.422912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:35:53.423967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:35:53.424177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:35:53.424223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:35:53.424591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:53.424692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:35:53.424734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:715:2704] TestWaitNotification: OK eventTxId 101 2026-01-07T17:35:53.425343Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:53.425581Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 406us result status StatusSuccess 2026-01-07T17:35:53.425957Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:35:53.429207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:53.429513Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2026-01-07T17:35:53.429595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2026-01-07T17:35:53.429737Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:35:53.432326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:35:53.432578Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:35:53.432891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:35:53.432975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:35:53.433352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:35:53.433452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:35:53.433492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:723:2712] TestWaitNotification: OK eventTxId 102 2026-01-07T17:35:53.433927Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:53.434138Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 234us result status StatusSuccess 2026-01-07T17:35:53.434506Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:47.521697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:47.646045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:47.668634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:47.669362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:47.669434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:47.974942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:47.975089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:48.058400Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807344027288 != 1767807344027292 2026-01-07T17:35:48.074723Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:48.124733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:48.229161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:48.526603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:48.540236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:48.645981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:48.930445Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:48.930609Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-07T17:35:48.990064Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.058986s, errors=0 2026-01-07T17:35:48.990218Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:52.235032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:52.240775Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:52.244706Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:52.245010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:52.245155Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:52.472173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:52.472263Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:52.491660Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807349426412 != 1767807349426416 2026-01-07T17:35:52.495581Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:52.541059Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:52.622514Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:52.929062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:52.943783Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:53.058022Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:53.305415Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:53.305610Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-07T17:35:53.366654Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.060656s, errors=0 2026-01-07T17:35:53.366747Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> 
TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TExternalDataSourceTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:52.428381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:52.428459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:52.428490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:52.428521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:52.428554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:52.428589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:52.428638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:52.428684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:52.429551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:52.429884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:52.543999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:52.544093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:52.544885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:52.557506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:52.558332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:52.558545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:52.569627Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:52.569939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:52.570623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:52.570949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:52.575539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:52.575756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:52.576785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:52.576850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:52.577024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:52.577065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:52.577165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:52.577328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:52.718969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.719739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.719822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.719891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.719958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: 
ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:52.720498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:54.557974Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:54.558133Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/DirA/MyExternalDataSource2" took 179us result status StatusSuccess 2026-01-07T17:35:54.558383Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:54.558938Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:54.559054Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 149us result status StatusSuccess 2026-01-07T17:35:54.559297Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: 
false } Children { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:54.559651Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:54.559776Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 148us result status StatusSuccess 2026-01-07T17:35:54.559950Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:54.560428Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:54.560586Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 188us result status StatusSuccess 2026-01-07T17:35:54.560859Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestNotEnoughResources >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> KqpStreamLookup::ReadTableDuringSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:47.178962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:47.294234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:47.315082Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:47.315605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:47.315668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:47.625382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:47.625520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:47.710076Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807343980217 != 1767807343980221 2026-01-07T17:35:47.723326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:47.773341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:47.911739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:48.234331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:48.248240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:48.358778Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2026-01-07T17:35:48.643984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:48.644169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:48.644629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:48.644697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:48.659985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:48.929037Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2026-01-07T17:35:48.930473Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-07T17:35:48.942499Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor finished in 0.011689s, errors=0 2026-01-07T17:35:48.942896Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:48.943086Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-07T17:35:49.001703Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor finished in 0.058330s, errors=0 2026-01-07T17:35:49.001796Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:971:2822] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-07T17:35:52.053519Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:52.059281Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:52.062616Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:52.062930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:52.063089Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:52.290956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:52.291091Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:52.315148Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807349399848 != 1767807349399852 2026-01-07T17:35:52.319326Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:52.364453Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:52.448384Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:52.723658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:52.737875Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:52.841852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:53.107866Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2026-01-07T17:35:53.108026Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 2026-01-07T17:35:53.538722Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.430258s, errors=0 2026-01-07T17:35:53.538843Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 2026-01-07T17:35:53.544812Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2026-01-07T17:35:53.564455Z node 2 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:994:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.564652Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.565128Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1005:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.565199Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.755378Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2026-01-07T17:35:53.757378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1045:2895], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.775015Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.775530Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1049:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.775615Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:53.787388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:53.836270Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:35:54.031559Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2026-01-07T17:35:54.031862Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-07T17:35:54.043910Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor finished in 0.011751s, errors=0 2026-01-07T17:35:54.044291Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:54.044589Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-07T17:35:54.103047Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor finished in 0.057790s, errors=0 2026-01-07T17:35:54.103149Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:1139:2970] with tag# 3 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:54.099063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:54.099183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:54.099236Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:54.099279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:54.099320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:54.099363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:54.099448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:54.099522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:54.100556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:54.100867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:54.203109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:54.203194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:54.204077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:54.216390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:54.217245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:54.217435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:54.236233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:54.236563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:54.237231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:54.237543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:54.242004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:54.242217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:54.243315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:54.243388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-07T17:35:54.243569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:54.243617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:54.243713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:54.243847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:54.405444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406887Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.406983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:54.407416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 56Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2026-01-07T17:35:55.180432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-07T17:35:55.180592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2026-01-07T17:35:55.182931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:55.183251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-07T17:35:55.186873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:55.187260Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-07T17:35:55.187370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-07T17:35:55.187735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-07T17:35:55.190435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:55.190705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-07T17:35:55.193844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:55.194200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-07T17:35:55.194318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-07T17:35:55.194475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-07T17:35:55.201816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:55.202136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-07T17:35:55.205453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:35:55.205754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2026-01-07T17:35:55.205850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2026-01-07T17:35:55.205969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-07T17:35:55.208580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:55.208847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:48.290234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:48.394370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:48.412781Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:48.413258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:48.413308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:48.692756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:48.692915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:48.772541Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807345288386 != 1767807345288390 2026-01-07T17:35:48.784918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:48.832591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:48.928785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:49.228897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:49.248137Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:49.354763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:49.648649Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:49.648774Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-07T17:35:49.744421Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.095263s, errors=0 2026-01-07T17:35:49.744528Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:53.519127Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:53.525690Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:53.529852Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:53.530193Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:53.530364Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:53.793824Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:53.794036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:53.822470Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807350371326 != 1767807350371330 2026-01-07T17:35:53.826375Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:53.873458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:53.956666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:54.262190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:54.277454Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:54.387003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:54.626345Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-07T17:35:54.626474Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-07T17:35:54.729690Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.102841s, errors=0 2026-01-07T17:35:54.729817Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit 
[GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:45.629777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:45.801281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:45.833862Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:45.834437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:45.834512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:46.156041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:46.156182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:46.252746Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807342310662 != 1767807342310666 2026-01-07T17:35:46.269604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:46.318109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:46.420865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:46.847149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:46.861527Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:46.998575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:47.371935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:47.372085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:47.372183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:47.373472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:969:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:47.373587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:47.378926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:47.519087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:47.602133Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1025:2858] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:55.401135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:55.401203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:55.401235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:55.401265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:55.401307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:55.401332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:55.401404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:55.401457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:55.402323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:55.402598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:55.530654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:55.530748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:55.531619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:55.544308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:55.545158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:55.545324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:55.555743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:55.556075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:55.556773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:55.557113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:55.562735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:55.562945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:55.564079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:55.564162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:55.564347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:55.564409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:55.564514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:55.564686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:55.706187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.707964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.708071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.708180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.708313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:55.708391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
rRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-07T17:35:56.570815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:56.570945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:56.570998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-07T17:35:56.571125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:56.571212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:35:56.571420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:56.571488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:56.572168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:35:56.573686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:35:56.575119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:56.575187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:56.575421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:35:56.575590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:56.575627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-07T17:35:56.575667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:35:56.575985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:35:56.576033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:35:56.576140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:35:56.576191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:35:56.576240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:35:56.576284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:35:56.576323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:35:56.576376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:35:56.576417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:35:56.576450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:35:56.576526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:56.576575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:35:56.576615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:35:56.576650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:35:56.577048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:35:56.577132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:35:56.577180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:35:56.577221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:35:56.577267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:56.577542Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:56.577612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:35:56.577687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:56.577995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:35:56.578069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:35:56.578170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:35:56.578210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:35:56.578242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:56.578321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:35:56.583043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:35:56.583188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:35:56.583313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:35:56.583589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:35:56.583639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:35:56.584120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:35:56.584248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:35:56.584289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:795:2784] TestWaitNotification: OK eventTxId 104 2026-01-07T17:35:56.584970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:56.585231Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 257us result status StatusPathDoesNotExist 2026-01-07T17:35:56.585403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:44.357458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:44.479030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:44.513218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:44.513800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:44.513891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:44.864795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:44.865345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:44.952000Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807340924464 != 1767807340924468 2026-01-07T17:35:44.970348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:45.022088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:45.125903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:45.458021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:45.481584Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:45.618668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:46.120175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1000:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:46.120307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1010:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:46.120396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:46.120991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:46.121169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:46.129650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:46.283958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1015:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:35:46.356831Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1073:2896] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] |86.3%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestGetStatusWorks [GOOD] >> THiveTest::TestNotEnoughResources [GOOD] >> THiveTest::TestRestartTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 2227, MsgBus: 12163 2026-01-07T17:33:39.826790Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674193861767833:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:39.826849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:40.056156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:40.061149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:40.061241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:40.078195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:40.168512Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674193861767800:2081] 1767807219825453 != 1767807219825456 2026-01-07T17:33:40.179645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:40.229134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:40.229178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:40.229188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:40.229300Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:40.312846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:40.640587Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:40.653610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:33:40.727829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:40.832863Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:40.860793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:40.998908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.049776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:42.779028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674206746671574:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:42.779167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:42.779633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674206746671584:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:42.779708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.111791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.146389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.191958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.222040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.250495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.300106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.365959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.460429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.543416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674211041639750:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.543503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.544021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674211041639755:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.544059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674211041639756:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.544185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.548615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:43.560030Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674211041639759:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:33:43.630352Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674211041639810:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:44.826973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674193861767833:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:44.827113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280;columns=3; Trying to start YDB, gRPC: 64256, MsgBus: 11963 2026-01-07T17: ... pp:689) 2026-01-07T17:35:42.215633Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.306085Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.416601Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:42.624483Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674720786272081:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.624632Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.625104Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674720786272086:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.625199Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674720786272087:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.625305Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:42.632940Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:42.655310Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674720786272090:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:35:42.760195Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674720786272141:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 18463, MsgBus: 8809 2026-01-07T17:35:48.664638Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7592674747910271290:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:48.665698Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:48.708252Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:48.809405Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:48.809550Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:48.812066Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:48.832192Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:48.871088Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 
2026-01-07T17:35:48.871117Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:35:48.881797Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:48.899687Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:48.899722Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:48.899734Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:48.899828Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:49.672099Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:49.886075Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:49.921585Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:53.664099Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7592674747910271290:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:53.664194Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:54.735261Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674773680076376:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:54.735363Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674773680076356:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:54.735553Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:54.739827Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674773680076394:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:54.740012Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:54.742222Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:54.766810Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7592674773680076393:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:35:54.856616Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7592674773680076447:2852] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3440;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2832;columns=4; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:35:55.761222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:35:55.761310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:55.761354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:35:55.761407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:35:55.761468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:35:55.761524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:35:55.761597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:35:55.761681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:35:55.762660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:55.762943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:35:55.892758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:35:55.892830Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:55.893712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:35:55.906757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:35:55.907593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:35:55.907777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:35:55.919033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:35:55.919405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:35:55.920245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:55.920608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:35:55.925155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:55.925317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:35:55.926207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:55.926256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:55.926379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:35:55.926418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:55.926507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:35:55.926681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:35:56.066152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.067945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:35:56.068532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
nByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-07T17:35:57.964302Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:35:57.964430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:35:57.964496Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:35:57.964618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:57.964705Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:35:57.964875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:57.964965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:57.966649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:57.967446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:35:57.968811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:35:57.968855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:35:57.969031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:35:57.969179Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:35:57.969218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:35:57.969257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:35:57.969487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:35:57.969532Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:35:57.969640Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:35:57.969677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:57.969721Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:35:57.969786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:57.969831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:35:57.969875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:35:57.969914Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:35:57.969951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:35:57.970040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:35:57.970088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:35:57.970137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:35:57.970178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:35:57.970767Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:57.970876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:57.970914Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:57.970964Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:35:57.971011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:35:57.971379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:35:57.971426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:35:57.971500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:35:57.971718Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:57.971807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:35:57.971846Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:35:57.971875Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:35:57.971904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:35:57.971973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:35:57.976012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:35:57.976147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:35:57.976217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:35:57.976420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:35:57.976463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:35:57.976849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:35:57.976993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:35:57.977049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:745:2734] TestWaitNotification: OK eventTxId 102 2026-01-07T17:35:57.977529Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:35:57.977732Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 231us result status StatusPathDoesNotExist 2026-01-07T17:35:57.977905Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 1:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:93:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:92:2120] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:208:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:88:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:91:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:93:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:92:2120] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:208:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:89:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:92:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:94:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:93:2120] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:209:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:92:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:95:2057] recipient: [31:94:2122] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:97:2057] recipient: [31:94:2122] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:96:2123] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:212:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:92:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:95:2057] recipient: [32:94:2122] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:97:2057] recipient: [32:94:2122] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:96:2123] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:212:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:78:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:81:2057] recipient: [35:80:2112] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:83:2057] recipient: [35:80:2112] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! 
new actor is[35:82:2113] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:198:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:79:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:82:2057] recipient: [37:81:2112] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:84:2057] recipient: [37:81:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:83:2113] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:199:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:81:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:84:2057] recipient: [38:83:2114] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:86:2057] recipient: [38:83:2114] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:85:2115] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:201:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:84:2057] recipient: [39:83:2114] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:86:2057] recipient: [39:83:2114] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:85:2115] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:201:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2114] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:87:2057] recipient: [40:84:2114] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2115] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... r refreshed! new actor is[31:83:2113] Leader for TabletID 72057594037927937 is [31:83:2113] sender: [31:199:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:85:2057] recipient: [32:84:2115] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:87:2057] recipient: [32:84:2115] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:86:2116] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:202:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:83:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:86:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:88:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:87:2116] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:105:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:85:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:88:2057] recipient: [36:87:2117] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:90:2057] recipient: [36:87:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:89:2118] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:205:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2117] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:91:2057] recipient: [37:88:2117] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:90:2118] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:108:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:88:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:91:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:93:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:92:2120] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:208:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:88:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:91:2057] recipient: [39:90:2119] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:93:2057] recipient: [39:90:2119] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:92:2120] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:208:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:89:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:92:2057] recipient: [40:91:2119] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:94:2057] recipient: [40:91:2119] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:93:2120] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:209:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:92:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:95:2057] recipient: [41:94:2122] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:97:2057] recipient: [41:94:2122] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:96:2123] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:212:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:92:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:95:2057] recipient: [42:94:2122] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:97:2057] recipient: [42:94:2122] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:96:2123] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:212:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::WriteRead >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> EvWrite::WriteWithSplit >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::ReadWithProgramNoProjection >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::ReadSomePrograms >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 29477, MsgBus: 17310 2026-01-07T17:33:40.240090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674197718319526:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:40.240213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:33:40.460526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:33:40.470617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:33:40.470746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:33:40.545878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:33:40.606850Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:40.607574Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674197718319495:2081] 1767807220238343 != 1767807220238346 2026-01-07T17:33:40.660221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:33:40.660242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:33:40.660249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:33:40.660340Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:33:40.665728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:33:41.073833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:33:41.132980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.244533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.249265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:33:41.382077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:41.445655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.221746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210603223256:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.221868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.223139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210603223266:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.223229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.573090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.609054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.642154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.674967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.713463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.754320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.816641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.873407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:33:43.970269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210603224140:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.970381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.970825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210603224146:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.970881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674210603224145:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.970926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:33:43.974874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:33:43.986313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674210603224149:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:33:44.044409Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674214898191496:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:33:45.241399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674197718319526:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:33:45.241505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;prior ... checking } 2026-01-07T17:35:06.623695Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592674568816856200:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:06.654974Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592674547342018807:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:06.655076Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=336;columns=1; Trying to start YDB, gRPC: 31553, MsgBus: 15147 2026-01-07T17:35:07.775709Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592674572398392678:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:07.775858Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:07.797280Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:07.886521Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:07.907110Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:07.907239Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:07.934984Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:07.989672Z node 12 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:07.995784Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:07.995822Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:07.995834Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:07.995969Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:08.604376Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:08.783357Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:12.736251Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674593873230002:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:12.736327Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592674593873230026:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:12.736411Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:12.741037Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:12.752822Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592674593873230029:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:35:12.775926Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592674572398392678:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:12.776036Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:12.832384Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592674593873230094:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=344;columns=1; Trying to start YDB, gRPC: 1223, MsgBus: 11167 2026-01-07T17:35:13.890091Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7592674598618683770:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:13.890163Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:13.990988Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:14.006420Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:14.006556Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:14.007428Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:14.048021Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:14.124332Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:14.124366Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:14.124383Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:14.124502Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:14.247314Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:14.856676Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:14.897176Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:18.680272Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674620093521101:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:18.680319Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674620093521109:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:18.680399Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:18.680739Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592674620093521115:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:18.680875Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:18.685335Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:18.700871Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7592674620093521116:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:35:18.758808Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7592674620093521168:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; 2026-01-07T17:35:18.890251Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7592674598618683770:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:18.890408Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TColumnShardTestReadWrite::WriteRead [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 7138, MsgBus: 21851 2026-01-07T17:34:15.117929Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674348455285241:2238];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:15.121781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:15.368099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:15.396783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:15.396887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:15.519039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:15.520729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:15.552046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:15.778774Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:15.778802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:15.778814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:15.778922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:16.129256Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:16.826999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:16.841974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:34:16.907889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.061720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.243806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.315339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:19.497979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674365635156096:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.498085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.498413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674365635156106:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.498454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.848584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:19.889811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:19.972908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.022695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.096227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.118854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674348455285241:2238];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:20.118910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:20.181877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.256536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.346856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.471737Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674369930124283:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.471826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.472235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674369930124288:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.472272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674369930124289:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.472532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.476471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:20.491838Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674369930124292:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:34:20.559388Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674369930124344:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:23.581456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:08.259830Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:08.344805Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592674574293135832:2081] 1767807308242096 != 1767807308242099 2026-01-07T17:35:08.365567Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:08.380865Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:08.381010Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:08.386055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:08.427606Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:08.427629Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:08.427637Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:08.427728Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:08.454205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:08.869652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:08.877505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:08.933927Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:09.117093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:09.193955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:09.316734Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:11.167671Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674587178039591:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.167735Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.167939Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674587178039600:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.167972Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.263589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.290914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.322164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.349212Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.377638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.407416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.437541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.479107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:11.608576Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674587178040472:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.608656Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.608752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674587178040477:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.608806Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674587178040479:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.608838Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:11.611858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:11.620637Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592674587178040481:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:35:11.677697Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592674587178040532:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:13.161663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:13.261957Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592674574293135859:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:13.263752Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:35:23.314244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:35:23.314270Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2026-01-07T17:36:02.788532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.818468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.818680Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:02.825540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.825790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.826017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.826163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.826272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.826374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.826473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.826594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.826688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.826811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.826925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.827022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.827125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.855176Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.855718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.855765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.855896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.856042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.856097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.856127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.856234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.856331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.856373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.856401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.856585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.856648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.856707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.856740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.856837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.856909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.856992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:02.857039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.857095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.857132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.857159Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:02.857195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.857229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.857276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.857481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.857582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.857606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.857705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.857741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.857778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.857815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.857853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.857885Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:02.857930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.857962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.857989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.858076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:02.858136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... imestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=100; 2026-01-07T17:36:03.660831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=level,timestamp; 2026-01-07T17:36:03.661140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2026-01-07T17:36:03.661315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.661470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.661615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.661775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:03.661937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.662122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.662386Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:337:2349] finished for tablet 9437184 2026-01-07T17:36:03.662889Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:329:2341];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1238165,"name":"_full_task","f":1238165,"d_finished":0,"c":0,"l":1254520,"d":16355},"events":[{"name":"bootstrap","f":1238439,"d_finished":2239,"c":1,"l":1240678,"d":2239},{"a":1253808,"name":"ack","f":1252381,"d_finished":1344,"c":1,"l":1253725,"d":2056},{"a":1253797,"name":"processing","f":1240956,"d_finished":4929,"c":3,"l":1253727,"d":5652},{"name":"ProduceResults","f":1239909,"d_finished":2444,"c":6,"l":1254228,"d":2444},{"a":1254234,"name":"Finish","f":1254234,"d_finished":0,"c":0,"l":1254520,"d":286},{"name":"task_result","f":1240973,"d_finished":3534,"c":2,"l":1252283,"d":3534}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.662999Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:03.663419Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:329:2341];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1238165,"name":"_full_task","f":1238165,"d_finished":0,"c":0,"l":1255113,"d":16948},"events":[{"name":"bootstrap","f":1238439,"d_finished":2239,"c":1,"l":1240678,"d":2239},{"a":1253808,"name":"ack","f":1252381,"d_finished":1344,"c":1,"l":1253725,"d":2649},{"a":1253797,"name":"processing","f":1240956,"d_finished":4929,"c":3,"l":1253727,"d":6245},{"name":"ProduceResults","f":1239909,"d_finished":2444,"c":6,"l":1254228,"d":2444},{"a":1254234,"name":"Finish","f":1254234,"d_finished":0,"c":0,"l":1255113,"d":879},{"name":"task_result","f":1240973,"d_finished":3534,"c":2,"l":1252283,"d":3534}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:03.663509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:03.593213Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:36:03.663553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:03.663718Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2026-01-07T17:36:03.664323Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-07T17:36:03.664664Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1767807363768:100} readable: {1767807363768:max} at tablet 9437184 
2026-01-07T17:36:03.664776Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-07T17:36:03.665061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767807363768:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-07T17:36:03.665164Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767807363768:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-07T17:36:03.665290Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767807363768:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2026-01-07T17:36:03.672837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:03.704169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:03.704401Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:03.711656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:03.711877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:03.712071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:03.712210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:03.712338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2026-01-07T17:36:03.712453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:03.712549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:03.712683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:03.712793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:03.712912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:03.713087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:03.713210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:03.713346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:03.741158Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:03.742107Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:03.742166Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:03.742353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:03.742525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:03.742594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:03.742640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:03.742757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:03.742835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:03.742884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:03.742917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:03.743079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:03.743135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:03.743175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:03.743203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:03.743304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:03.743371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:03.743417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:03.743463Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:03.743529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:03.743566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:03.743600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:03.743643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:03.743706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:03.743744Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:03.743956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:03.744060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:03.744093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:03.744223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:03.744266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:03.744294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:03.744342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:03.744381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:03.744421Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:03.744478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:03.744534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:03.744565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:03.744734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:03.744792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
2944;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:36:04.328176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136769821132960;op_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136975982282944;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-07T17:36:04.328226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136769821132960;op_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767807364645;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136975982282944;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-07T17:36:04.328475Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:36:04.328605Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807364645 at tablet 9437184, mediator 0 2026-01-07T17:36:04.328651Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-07T17:36:04.328943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:36:04.329021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:36:04.329055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:36:04.329134Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:36:04.336499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767807364645;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-07T17:36:04.336621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:04.336752Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-07T17:36:04.336833Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-07T17:36:04.337226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:36:04.354006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-07T17:36:04.378093Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2026-01-07T17:36:04.380953Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-07T17:36:04.381021Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=3200;operation_id=575b78ca-ebef11f0-a7492a1e-89722630;in_flight=1;size_in_flight=3200; 2026-01-07T17:36:04.392396Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=3768;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-07T17:36:04.394116Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=3200;event=data_write_finished;writing_id=575b78ca-ebef11f0-a7492a1e-89722630; 2026-01-07T17:36:04.394376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=60;count=1; 2026-01-07T17:36:04.394449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=156;data_size=132;sum=156;count=2;size_of_meta=112; 2026-01-07T17:36:04.394544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=236;data_size=212;sum=236;count=1;size_of_portion=192; 2026-01-07T17:36:04.395061Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:36:04.395190Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:36:04.407165Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:36:04.407388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:04.420469Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767807364651 at tablet 9437184, mediator 0 2026-01-07T17:36:04.420531Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-07T17:36:04.420758Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:88;progress_tx_id=100;lock_id=1;broken=0; 2026-01-07T17:36:04.432710Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-07T17:36:04.432825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=100;lock_id=1;broken=0; 2026-01-07T17:36:04.433042Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:36:04.433094Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-07T17:36:04.433452Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:04.433519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:04.433604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:04.433664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:04.433721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:36:04.447335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:04.447440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:04.447514Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:04.447631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:04.448137Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1767807364651:100} readable: {1767807364651:max} at tablet 9437184 2026-01-07T17:36:04.460197Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-07T17:36:04.462208Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=100;scan_id=0;gen=0;table=;snapshot={1767807364651:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> Normalizers::PortionsNormalizer [GOOD] >> Normalizers::RemoveDeleteFlagNormalizer >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2026-01-07T17:36:01.860155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:01.886522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:01.886709Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:01.892093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:01.892281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:01.892429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:01.892507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2026-01-07T17:36:01.892565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:01.892628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:01.892714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:01.892807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:01.892904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:01.893002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.893093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:01.893189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:01.893282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:01.912963Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:01.913247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:01.913314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:01.913477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.913611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:01.913670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:01.913699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:01.913784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:01.913828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:01.913856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:01.913875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:01.913997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.914038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:01.914065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:01.914081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:01.914148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:01.914183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:01.914207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:01.914223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:01.914262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:01.914294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:01.914332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:01.914370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:01.914397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:01.914414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:01.914545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:01.914579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:01.914598Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:01.914671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:01.914707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.914729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.914774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:01.914812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:01.914831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:01.914866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:01.914903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:01.914933Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:01.915025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:01.915051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... s=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:04.884077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:04.884261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:36:04.884384Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.884476Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.884580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.884728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:04.884840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.884978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.885349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:475:2486] finished for tablet 9437184 2026-01-07T17:36:04.885770Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:474:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":3451899,"name":"_full_task","f":3451899,"d_finished":0,"c":0,"l":3462414,"d":10515},"events":[{"name":"bootstrap","f":3452091,"d_finished":1155,"c":1,"l":3453246,"d":1155},{"a":3461700,"name":"ack","f":3460699,"d_finished":904,"c":1,"l":3461603,"d":1618},{"a":3461691,"name":"processing","f":3453349,"d_finished":2784,"c":3,"l":3461605,"d":3507},{"name":"ProduceResults","f":3452910,"d_finished":1675,"c":6,"l":3462012,"d":1675},{"a":3462017,"name":"Finish","f":3462017,"d_finished":0,"c":0,"l":3462414,"d":397},{"name":"task_result","f":3453359,"d_finished":1842,"c":2,"l":3460577,"d":1842}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.885839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:04.886342Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:474:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":3451899,"name":"_full_task","f":3451899,"d_finished":0,"c":0,"l":3462876,"d":10977},"events":[{"name":"bootstrap","f":3452091,"d_finished":1155,"c":1,"l":3453246,"d":1155},{"a":3461700,"name":"ack","f":3460699,"d_finished":904,"c":1,"l":3461603,"d":2080},{"a":3461691,"name":"processing","f":3453349,"d_finished":2784,"c":3,"l":3461605,"d":3969},{"name":"ProduceResults","f":3452910,"d_finished":1675,"c":6,"l":3462012,"d":1675},{"a":3462017,"name":"Finish","f":3462017,"d_finished":0,"c":0,"l":3462876,"d":859},{"name":"task_result","f":3453359,"d_finished":1842,"c":2,"l":3460577,"d":1842}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:04.886518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:04.871929Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-07T17:36:04.886552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:04.886792Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === 
Server->StartServer(false); 2026-01-07T17:35:55.776795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674777829849887:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:55.777281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:55.823312Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:35:55.824262Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592674776148172153:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:55.824475Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:55.831140Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:35:55.975205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:56.018107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:56.057361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:56.057465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:56.079260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:56.079353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:56.099289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:56.144164Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:56.147826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:56.149861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:56.179387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:56.331399Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:56.729714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001772/r3tmp/yandexj8NosX.tmp 
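
The startup phase of this test is dominated by expected WARN/ERROR noise of the kind visible just above and below: METADATA_PROVIDER probing //Root/.metadata before it exists, KQP_PROXY LookupError on script_executions, TX_CONVEYOR wakeup complaints, and NET_CLASSIFIER falling back to a temp file. The snippet below is a minimal triage sketch, not part of the ya/ydb tooling: it assumes the captured output has been saved to a local file (ya_log.txt is an assumed path) and that each event starts with an ISO timestamp followed by "node N :COMPONENT LEVEL:", as in the excerpts here. It groups WARN/ERROR/NOTICE events by component so genuine failures stand out from this startup noise.

#!/usr/bin/env python3
"""Group WARN/ERROR/NOTICE events from a saved ydb test log by component.

Illustrative sketch only: the log path and the event layout
("<timestamp> node <N> :<COMPONENT> <LEVEL>: <message>") are assumptions
taken from the excerpts in this report, not a documented format contract.
"""
import re
import sys
from collections import Counter

# Matches e.g. "2026-01-07T17:35:56.730047Z node 1 :NET_CLASSIFIER ERROR: got bad ..."
EVENT = re.compile(
    r"(\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (\d+) :(\w+) (WARN|ERROR|NOTICE): "
    r"(.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:|\Z)",
    re.S,
)

def triage(path: str) -> None:
    text = open(path, encoding="utf-8", errors="replace").read()
    counts = Counter()
    samples = {}  # first message seen per (component, level)
    for _ts, _node, comp, level, msg in EVENT.findall(text):
        key = (comp, level)
        counts[key] += 1
        samples.setdefault(key, " ".join(msg.split())[:120])
    for (comp, level), n in counts.most_common():
        print(f"{n:6d}  {level:<6} {comp:<22} e.g. {samples[(comp, level)]}")

if __name__ == "__main__":
    triage(sys.argv[1] if len(sys.argv) > 1 else "ya_log.txt")

Running it against a capture of a block like this one would surface, for example, the repeated (NET_CLASSIFIER, WARN) and (KQP_PROXY, WARN) entries with one sample message each, which is usually enough to separate benign bring-up chatter from the event that actually failed a test.
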
2026-01-07T17:35:56.729744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001772/r3tmp/yandexj8NosX.tmp 2026-01-07T17:35:56.729936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001772/r3tmp/yandexj8NosX.tmp 2026-01-07T17:35:56.730047Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:56.793082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:56.812636Z INFO: TTestServer started on Port 14050 GrpcPort 1946 2026-01-07T17:35:56.828175Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:1946 === TenantModeEnabled() = 1 === Init PQ - start server on port 1946 2026-01-07T17:35:57.129399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:35:57.129581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:35:57.129788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:35:57.129806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:35:57.129976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:35:57.130020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:57.132327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:35:57.132455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:35:57.132622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:35:57.132672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:35:57.132688Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-07T17:35:57.132702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-07T17:35:57.133577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:35:57.133600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-07T17:35:57.133618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:35:57.135405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:35:57.135439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:35:57.135459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-07T17:35:57.137156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:35:57.137189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:35:57.137210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:35:57.137230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-07T17:35:57.141077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:35:57.142780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-07T17:35:57.142893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:35:57.145162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807357190, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:35:57.145274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807357190 MediatorID: 72057594046382081 TabletID: 72057594046644480, 
at schemeshard: 72057594046644480 2026-01-07T17:35:57.145292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:35:57.145509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-07T17:35:57.145536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:35:57.145663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:35:57.145710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:35:57.147342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:35:57.147365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublis ... 046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 47 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-07T17:36:04.930357Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976720664 2026-01-07T17:36:04.930373Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 47], version: 5 2026-01-07T17:36:04.930389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 47] was 2 2026-01-07T17:36:04.930625Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 48 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-07T17:36:04.930689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 48 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-07T17:36:04.930706Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720664 2026-01-07T17:36:04.930717Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 48], version: 2 2026-01-07T17:36:04.930727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 48] was 4 2026-01-07T17:36:04.930768Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 
281474976720664, subscribers: 1 2026-01-07T17:36:04.930795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7592674817330406669:2358] 2026-01-07T17:36:04.931875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2026-01-07T17:36:04.931915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2026-01-07T17:36:05.037818Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-07T17:36:05.037845Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 1 2026-01-07T17:36:05.038097Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2026-01-07T17:36:05.038205Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:33120 2026-01-07T17:36:05.038224Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:33120 proto=v1 topic=Root/acc/topic1 durationSec=0 2026-01-07T17:36:05.038231Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:36:05.040342Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 1 sessionId: describe result for acl check 2026-01-07T17:36:05.040455Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:36:05.040466Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:36:05.040471Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:36:05.040495Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: 
TPartitionChooser [3:7592674821625374161:2362] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-07T17:36:05.040515Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:36:05.040851Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-07T17:36:05.041005Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-07T17:36:05.041456Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 2026-01-07T17:36:05.042353Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 grpc read done: success: 0 data: 2026-01-07T17:36:05.042371Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 grpc read failed 2026-01-07T17:36:05.042491Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 1 sessionId: 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 2026-01-07T17:36:05.042505Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 12345678|525eb0dc-ba5b0deb-fb0f4ae0-cf26a7d0_0 is DEAD Finish: 0 === InitializeWritePQService done 2026-01-07T17:36:05.042708Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === PersQueueClient === InitializePQ completed 2026-01-07T17:36:05.050371Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-07T17:36:05.050403Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-07T17:36:05.050708Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2026-01-07T17:36:05.050793Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:33120 2026-01-07T17:36:05.050808Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:33120 proto=v1 topic=topic1 durationSec=0 2026-01-07T17:36:05.050814Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:36:05.052466Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-07T17:36:05.052615Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:36:05.052630Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition 
AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:36:05.052639Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:36:05.052674Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592674821625374181:2371] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-07T17:36:05.052694Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:36:05.053112Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-07T17:36:05.053170Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-07T17:36:05.053520Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 2026-01-07T17:36:05.054788Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 grpc read done: success: 0 data: 2026-01-07T17:36:05.054811Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 grpc read failed 2026-01-07T17:36:05.055412Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 2 sessionId: 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 2026-01-07T17:36:05.055435Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 12345678|6d4dd779-86db5321-15e753b2-cc23413_0 is DEAD 2026-01-07T17:36:05.055740Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |86.3%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
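
The TKeyValueTest traces in this block all follow the same shape: the tablet 72057594037927937 bring-up is replayed, a reboot is injected on a different event each round ("!Reboot ... on event NKikimr::TEvKeyValue::TEvRequest !", TEvIntermediate, TEvTabletPipe::TEvServerConnected, ...), and the trace then records "rebooted!", "tablet resolver refreshed!" and the new leader actor. A small sketch, under the same assumptions as the previous snippet about where the log is saved, counts the injections per event type and cross-checks them against the number of completed "rebooted!" markers; the marker patterns are copied from the excerpts above and are not a stable interface.

#!/usr/bin/env python3
"""Summarize reboot-injection coverage in TKeyValueTest unittest output."""
import re
import sys
from collections import Counter

# "!Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest !"
INJECT = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) on event (\S+) !")
# "!Reboot 72057594037927937 (actor [2:58:2099]) rebooted!"
REBOOTED = re.compile(r"\(actor \[[^\]]+\]\) rebooted!")

def summarize(path: str) -> None:
    text = open(path, encoding="utf-8", errors="replace").read()
    per_event = Counter(ev for _tablet, ev in INJECT.findall(text))
    injected = sum(per_event.values())
    completed = len(REBOOTED.findall(text))
    for ev, n in per_event.most_common():
        print(f"{n:4d}  {ev}")
    print(f"injected={injected} rebooted={completed}"
          + ("  (mismatch!)" if injected != completed else ""))

if __name__ == "__main__":
    summarize(sys.argv[1] if len(sys.argv) > 1 else "ya_log.txt")

For a passing run such as TestCopyRangeToLongKey or TestLargeWriteAndDelete above, the injected and rebooted counts should match; a mismatch points at the round whose event type stopped appearing in the trace.
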
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:93:2057] recipient: [26:92:2121] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:95:2057] recipient: [26:92:2121] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:94:2122] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:210:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:91:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:94:2057] recipient: [27:93:2121] Leader for TabletID 72057594037927937 is [27:95:2122] sender: [27:96:2057] recipient: [27:93:2121] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! new actor is[27:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:78:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:81:2057] recipient: [30:80:2112] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:83:2057] recipient: [30:80:2112] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:82:2113] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:198:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:52:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:78:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:81:2057] recipient: [31:80:2112] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:83:2057] recipient: [31:80:2112] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! 
new actor is[31:82:2113] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:198:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:79:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:84:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:83:2113] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:199:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:82:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:85:2057] recipient: [34:84:2115] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:87:2057] recipient: [34:84:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! 
new actor is[34:86:2116] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:202:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:83:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:86:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:88:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:87:2116] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:203:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2118] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:91:2057] recipient: [36:88:2118] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2119] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2118] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:91:2057] recipient: [37:88:2118] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:90:2119] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:206:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:87:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:90:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:92:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:91:2119] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:207:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for Tab ... t: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:84:2115] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:87:2057] recipient: [35:84:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:86:2116] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:202:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:83:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:85:2115] Leader for TabletID 72057594037927937 is [36:87:2116] sender: [36:88:2057] recipient: [36:85:2115] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:203:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:86:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:89:2057] recipient: [45:88:2118] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:91:2057] recipient: [45:88:2118] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:90:2119] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:206:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:86:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:89:2057] recipient: [46:88:2118] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:91:2057] recipient: [46:88:2118] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:90:2119] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:206:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:87:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:90:2057] recipient: [47:89:2118] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:92:2057] recipient: [47:89:2118] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:91:2119] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:207:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> TTxAllocatorClientTest::ZeroRange [GOOD] >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2026-01-07T17:36:01.873499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:01.905386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:01.905636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:01.912885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:01.913173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:01.913421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:01.913542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:01.913654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:01.913780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:01.913884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:01.914007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:01.914131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:01.914232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.914363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:01.914479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:01.914595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:01.943013Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:01.945093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:01.945178Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:01.945377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.945549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:01.945633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:01.945676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:01.945783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:01.945853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:01.945899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:01.945930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:01.946142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.946210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:01.946253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:01.946284Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:01.946382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:01.946456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:01.946503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:01.946536Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:01.946589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:01.946624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:01.946670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:01.946716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:01.946752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:01.946777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:01.946999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:01.947109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:01.947144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:01.947273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:01.947327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.947359Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.947420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:01.947474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:01.947513Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:01.947564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:01.947602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:01.947634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:01.947777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:01.947838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
amp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:07.174405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:07.174640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:36:07.174788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.174903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.175035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.175202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:07.175355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.175488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.175774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-07T17:36:07.176158Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.008},{"events":["f_ack"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":5735304,"name":"_full_task","f":5735304,"d_finished":0,"c":0,"l":5746599,"d":11295},"events":[{"name":"bootstrap","f":5735482,"d_finished":1281,"c":1,"l":5736763,"d":1281},{"a":5745958,"name":"ack","f":5744508,"d_finished":1322,"c":1,"l":5745830,"d":1963},{"a":5745948,"name":"processing","f":5736923,"d_finished":3513,"c":3,"l":5745833,"d":4164},{"name":"ProduceResults","f":5736410,"d_finished":2206,"c":6,"l":5746291,"d":2206},{"a":5746296,"name":"Finish","f":5746296,"d_finished":0,"c":0,"l":5746599,"d":303},{"name":"task_result","f":5736934,"d_finished":2145,"c":2,"l":5744267,"d":2145}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.176214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:07.176557Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.008},{"events":["f_ack"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":5735304,"name":"_full_task","f":5735304,"d_finished":0,"c":0,"l":5747036,"d":11732},"events":[{"name":"bootstrap","f":5735482,"d_finished":1281,"c":1,"l":5736763,"d":1281},{"a":5745958,"name":"ack","f":5744508,"d_finished":1322,"c":1,"l":5745830,"d":2400},{"a":5745948,"name":"processing","f":5736923,"d_finished":3513,"c":3,"l":5745833,"d":4601},{"name":"ProduceResults","f":5736410,"d_finished":2206,"c":6,"l":5746291,"d":2206},{"a":5746296,"name":"Finish","f":5746296,"d_finished":0,"c":0,"l":5747036,"d":740},{"name":"task_result","f":5736934,"d_finished":2145,"c":2,"l":5744267,"d":2145}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:07.176605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:07.162164Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2026-01-07T17:36:07.176644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:07.176852Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2026-01-07T17:34:40.828514Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:34:40.829299Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:34:40.830070Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:34:40.831736Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.832282Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:34:40.842896Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.843073Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.843150Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.843242Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:34:40.843369Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.843469Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:34:40.843631Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:34:40.844421Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-07T17:34:40.844953Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.845047Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:34:40.845151Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-07T17:34:40.845222Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14470, MsgBus: 11377 2026-01-07T17:31:27.819447Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673624823154142:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:27.819608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:27.876183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:28.390291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:28.390387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:28.424874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:28.442070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:28.489069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592673624823153971:2081] 1767807087761720 != 1767807087761723 2026-01-07T17:31:28.489458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:28.601107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:28.601158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:31:28.601171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:31:28.601249Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:31:28.703210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:28.765082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:31:29.195189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:31:29.202276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:31:29.265179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:31:31.824361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642003024387:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.824505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.829101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642003024399:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.829177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592673642003024400:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.829520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:31:31.834252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:31:31.850112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-07T17:31:31.853145Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592673642003024403:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:31:31.954719Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592673642003024454:2767] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:31:32.380892Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=2;memory=1048576; 2026-01-07T17:31:32.385081Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 2. [Mem] memory 1048576 NOT granted 2026-01-07T17:31:32.397793Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592673646297991792:2364], TxId: 281474976715661, task: 2. Ctx: { CheckpointId : . TraceId : 01kecr713defs63thk2v92sx8g. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OGMyYmJmMzYtY2NmYWNmYy0yMzA2NjA0ZC1kZTUwMWJlZQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-07T17:31:32.375983Z }, code: 2029 }. 2026-01-07T17:31:32.398491Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592673646297991791:2363], TxId: 281474976715661, task: 1. Ctx: { CheckpointId : . TraceId : 01kecr713defs63thk2v92sx8g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OGMyYmJmMzYtY2NmYWNmYy0yMzA2NjA0ZC1kZTUwMWJlZQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592673646297991780:2347], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-07T17:31:32.402681Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGMyYmJmMzYtY2NmYWNmYy0yMzA2NjA0ZC1kZTUwMWJlZQ==, ActorId: [1:7592673642003024382:2347], ActorState: ExecuteState, LegacyTraceId: 01kecr713defs63thk2v92sx8g, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-07T17:31:32.375983Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-07T17:31:32.375983Z } , code: 2029 Trying to start YDB, gRPC: 8649, MsgBus: 61703 2026-01-07T17:31:33.561899Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673651322749877:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:31:33.561940Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:31:33.609866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:31:33.753097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:31:33.797321Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592673651322749853:2081] 1767807093547798 != 1767807093547801 2026-01-07T17:31:33.809919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:31:33.810003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:31:33.816636Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:31:33.828996Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:31:33.940595Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:31:34.065704Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:31:34.065730Z node 2 :NET_CLASSIFIER WA ... 
=, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrewvp7gjw44azy3ayf40n, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 464ms" severity: 1 }{ message: "Cancelling after 464ms during execution" severity: 1 } trace_id# 2026-01-07T17:35:51.025598Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrexryeva6meytxrycph79, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 466ms" severity: 1 }{ message: "Cancelling after 464ms during compilation" severity: 1 } trace_id# 2026-01-07T17:35:52.447990Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7592674765155819083:2528] TxId: 281474976711089. Ctx: { TraceId: 01kecrez566gt2vn684kd2n3dh, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 469ms } {
: Error: Cancelling after 473ms during execution } ] trace_id# 2026-01-07T17:35:52.448174Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674765155819095:6020], TxId: 281474976711089, task: 6. Ctx: { CheckpointId : . TraceId : 01kecrez566gt2vn684kd2n3dh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592674765155819083:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:52.448531Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674765155819098:6023], TxId: 281474976711089, task: 9. Ctx: { CheckpointId : . TraceId : 01kecrez566gt2vn684kd2n3dh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674765155819083:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:52.449952Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrez566gt2vn684kd2n3dh, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 469ms" severity: 1 }{ message: "Cancelling after 473ms during execution" severity: 1 } trace_id# 2026-01-07T17:35:54.660373Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7592674773745753900:2528] TxId: 281474976711099. Ctx: { TraceId: 01kecrf1a9eqage1etc4g4af2p, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 474ms } {
: Error: Cancelling after 474ms during execution } ] trace_id# 2026-01-07T17:35:54.660562Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674773745753915:6099], TxId: 281474976711099, task: 9. Ctx: { CheckpointId : . TraceId : 01kecrf1a9eqage1etc4g4af2p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674773745753900:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:54.673355Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrf1a9eqage1etc4g4af2p, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 474ms" severity: 1 }{ message: "Cancelling after 474ms during execution" severity: 1 } trace_id# 2026-01-07T17:35:56.530075Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrf34h1cyn0e0k4hf80cxe, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request timeout exceeded." issue_code: 2021 severity: 1 } trace_id# 2026-01-07T17:35:58.776990Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7592674790925623466:2528] TxId: 281474976711116. Ctx: { TraceId: 01kecrf5ag041281qx5vjf6t69, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 483ms } {
: Error: Cancelling after 487ms during execution } ] trace_id# 2026-01-07T17:35:58.777226Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623478:6224], TxId: 281474976711116, task: 6. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.777664Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623479:6225], TxId: 281474976711116, task: 7. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.777789Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623476:6222], TxId: 281474976711116, task: 4. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.778340Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623480:6226], TxId: 281474976711116, task: 8. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.778375Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037896 Cancelled read: {[5:7592674790925623488:6225], 0} 2026-01-07T17:35:58.778432Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623473:6219], TxId: 281474976711116, task: 1. Ctx: { TraceId : 01kecrf5ag041281qx5vjf6t69. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.778730Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623474:6220], TxId: 281474976711116, task: 2. Ctx: { TraceId : 01kecrf5ag041281qx5vjf6t69. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.778977Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623475:6221], TxId: 281474976711116, task: 3. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.779051Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623481:6227], TxId: 281474976711116, task: 9. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.779237Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592674790925623477:6223], TxId: 281474976711116, task: 5. Ctx: { CheckpointId : . TraceId : 01kecrf5ag041281qx5vjf6t69. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592674790925623466:2528], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:35:58.779339Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[5:7592674790925623484:6221], 0} 2026-01-07T17:35:58.779411Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037897 Cancelled read: {[5:7592674790925623489:6226], 0} 2026-01-07T17:35:58.779485Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[5:7592674790925623482:6219], 0} 2026-01-07T17:35:58.779544Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[5:7592674790925623483:6220], 0} 2026-01-07T17:35:58.779912Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MjY5ZTI5MzItM2ViNTliMy0zOTZmNWQ5Yi1jZDJmZTViMQ==, ActorId: [5:7592674322774177354:2528], ActorState: ExecuteState, LegacyTraceId: 01kecrf5ag041281qx5vjf6t69, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 483ms" severity: 1 }{ message: "Cancelling after 487ms during execution" severity: 1 } trace_id# |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |86.3%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |86.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2026-01-07T17:36:05.120598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:05.142348Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:05.142534Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:05.148424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:05.148608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:05.148771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:05.148857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:05.148916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:05.149015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:05.149094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:05.149180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:05.149266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:05.149337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.149434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:05.149507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:05.149573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:05.170221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:05.170922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:05.170987Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:05.171164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.171298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:05.171352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:05.171385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:05.171469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:05.171526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:05.171563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:05.171598Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:05.171737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.171788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:05.171818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:05.171853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:05.171940Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:05.171999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:05.172046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:05.172069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:05.172117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:05.172154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:05.172183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:05.172213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:05.172237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:05.172254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:05.172416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:05.172497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:05.172523Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:05.172662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:05.172708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.172731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.172811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:05.172855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:05.172897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:05.172971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:05.173011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:05.173042Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:05.173157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:05.173203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:08.279511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:08.279730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-07T17:36:08.279884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.280007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.280131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.280296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:08.280430Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.280558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.280823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-07T17:36:08.281321Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":3607460,"name":"_full_task","f":3607460,"d_finished":0,"c":0,"l":3621262,"d":13802},"events":[{"name":"bootstrap","f":3607748,"d_finished":1274,"c":1,"l":3609022,"d":1274},{"a":3620675,"name":"ack","f":3619404,"d_finished":1147,"c":1,"l":3620551,"d":1734},{"a":3620665,"name":"processing","f":3609173,"d_finished":4030,"c":3,"l":3620553,"d":4627},{"name":"ProduceResults","f":3608657,"d_finished":2160,"c":6,"l":3620976,"d":2160},{"a":3620980,"name":"Finish","f":3620980,"d_finished":0,"c":0,"l":3621262,"d":282},{"name":"task_result","f":3609192,"d_finished":2833,"c":2,"l":3619249,"d":2833}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.281426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:08.281898Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":3607460,"name":"_full_task","f":3607460,"d_finished":0,"c":0,"l":3621878,"d":14418},"events":[{"name":"bootstrap","f":3607748,"d_finished":1274,"c":1,"l":3609022,"d":1274},{"a":3620675,"name":"ack","f":3619404,"d_finished":1147,"c":1,"l":3620551,"d":2350},{"a":3620665,"name":"processing","f":3609173,"d_finished":4030,"c":3,"l":3620553,"d":5243},{"name":"ProduceResults","f":3608657,"d_finished":2160,"c":6,"l":3620976,"d":2160},{"a":3620980,"name":"Finish","f":3620980,"d_finished":0,"c":0,"l":3621878,"d":898},{"name":"task_result","f":3609192,"d_finished":2833,"c":2,"l":3619249,"d":2833}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:08.281968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:08.263378Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-07T17:36:08.282004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:08.282217Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink >> 
THiveTest::TestResetServerlessComputeResourcesMode |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> Cdc::VirtualTimestamps[YdsRunner] >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns [GOOD] >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns |86.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> TColumnShardTestReadWrite::ReadStale >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy |86.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestReassignNonexistentTablet >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] >> TKeyValueTest::TestConcatToLongKey [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> THiveTest::TestReassignNonexistentTablet [GOOD] >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] >> TColumnShardTestReadWrite::WriteStandalone >> BasicStatistics::TwoDatabases [GOOD] >> Normalizers::SchemaVersionsNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> EvWrite::WriteWithSplit [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> THiveTest::TestLockedTabletsMustNotRestart >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] >> Normalizers::ChunksV0MetaNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-07T17:36:10.312826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:10.350024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:10.350295Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 
9437184 2026-01-07T17:36:10.357979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:10.358257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:10.358505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:10.358654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:10.358770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:10.358865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:10.358982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:10.359107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:10.359232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:10.359347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:10.359446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:10.359589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:10.359732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:10.390470Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:10.391161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:10.391223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:10.391398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:10.391578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:10.391680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:10.391732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:10.391841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:10.391914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:10.391962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:10.391992Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:10.392175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:10.392243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:10.392291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:10.392321Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:10.392490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:10.392561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:10.392620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:36:10.392650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:10.392704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:10.392840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:10.392879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:10.392948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:10.393013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:10.393043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:10.393275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:10.393332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:10.393363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:10.393479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:10.393536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:10.393579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:10.393639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:10.393691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:10.393735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:36:10.393792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:10.393831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:10.393868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:10.394014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:10.394094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... ;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:13.679734Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-07T17:36:13.679772Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:13.679812Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:13.680208Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:13.680353Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.680396Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:13.680506Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-07T17:36:13.680562Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-07T17:36:13.680820Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-07T17:36:13.680970Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.681086Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.681209Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.681456Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:13.681563Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.681662Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.681850Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-07T17:36:13.682280Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":3991028,"name":"_full_task","f":3991028,"d_finished":0,"c":0,"l":3999961,"d":8933},"events":[{"name":"bootstrap","f":3991265,"d_finished":1023,"c":1,"l":3992288,"d":1023},{"a":3999500,"name":"ack","f":3998250,"d_finished":1043,"c":1,"l":3999293,"d":1504},{"a":3999490,"name":"processing","f":3992424,"d_finished":2603,"c":3,"l":3999296,"d":3074},{"name":"ProduceResults","f":3991938,"d_finished":1765,"c":6,"l":3999742,"d":1765},{"a":3999751,"name":"Finish","f":3999751,"d_finished":0,"c":0,"l":3999961,"d":210},{"name":"task_result","f":3992438,"d_finished":1512,"c":2,"l":3997893,"d":1512}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.682336Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:13.682699Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":3991028,"name":"_full_task","f":3991028,"d_finished":0,"c":0,"l":4000429,"d":9401},"events":[{"name":"bootstrap","f":3991265,"d_finished":1023,"c":1,"l":3992288,"d":1023},{"a":3999500,"name":"ack","f":3998250,"d_finished":1043,"c":1,"l":3999293,"d":1972},{"a":3999490,"name":"processing","f":3992424,"d_finished":2603,"c":3,"l":3999296,"d":3542},{"name":"ProduceResults","f":3991938,"d_finished":1765,"c":6,"l":3999742,"d":1765},{"a":3999751,"name":"Finish","f":3999751,"d_finished":0,"c":0,"l":4000429,"d":678},{"name":"task_result","f":3992438,"d_finished":1512,"c":2,"l":3997893,"d":1512}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:13.682776Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:13.670910Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-07T17:36:13.682815Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:13.682930Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> Cdc::DocApi[TopicRunner] [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> Cdc::HugeKey[PqRunner] >> Cdc::VirtualTimestamps[TopicRunner] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> Normalizers::SchemaVersionsNormalizer [GOOD] >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink 
[GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> Normalizers::CleanUnusedTablesNormalizer >> Normalizers::RemoveWriteIdNormalizer >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> DataShardVolatile::CompactedVolatileChangesCommit |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] Test command err: 2026-01-07T17:36:05.434768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:05.466852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:05.467097Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:05.474411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanSubColumnsPortions; 2026-01-07T17:36:05.474673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:05.474872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:05.475067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:05.475174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:05.475275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:05.475386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:05.475507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:05.475620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:05.475741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:05.475872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.475985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:05.476086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:05.476199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:05.504963Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:05.505395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanSubColumnsPortions; 2026-01-07T17:36:05.505460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:05.505793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=89; 2026-01-07T17:36:05.505908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2026-01-07T17:36:05.506020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=59; 2026-01-07T17:36:05.506130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2026-01-07T17:36:05.506309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanSubColumnsPortions;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:05.506401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-07T17:36:05.506444Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-07T17:36:05.506624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.506727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:05.506785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:05.506821Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-07T17:36:05.506906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:05.506976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:05.507037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:05.507092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:05.507288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.507370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:05.507419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:05.507469Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:05.507604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:05.507674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:05.507723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:05.507755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:05.507829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:05.507871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:05.507901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:05.507968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:05.508091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:05.508131Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:05.508355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:05.508412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:05.508452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:05.508616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:05.508669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.508698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstr ... 
mes=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:15.912612Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=138;finished=1; 2026-01-07T17:36:15.912666Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:15.912706Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:15.913101Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:15.913230Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:138;schema=id: uint64 json_payload: binary;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.913267Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:15.913377Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=138; 2026-01-07T17:36:15.913430Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=138;batch_columns=id,json_payload; 2026-01-07T17:36:15.913560Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:290:2291];bytes=283016;rows=4162;faults=0;finished=0;fault=0;schema=id: uint64 json_payload: binary; 2026-01-07T17:36:15.913671Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.913796Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.913956Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.914086Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:15.914178Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.914280Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-07T17:36:15.914548Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:292:2292] finished for tablet 9437184 2026-01-07T17:36:15.915075Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:290:2291];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.042},{"events":["l_task_result"],"t":1.002},{"events":["l_ProduceResults"],"t":1.003},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":1.004}],"full":{"a":9862592,"name":"_full_task","f":9862592,"d_finished":0,"c":0,"l":10866914,"d":1004322},"events":[{"name":"bootstrap","f":9862942,"d_finished":1876,"c":1,"l":9864818,"d":1876},{"a":10866364,"name":"ack","f":9905099,"d_finished":528032,"c":251,"l":10866277,"d":528582},{"a":10866344,"name":"processing","f":9865078,"d_finished":932691,"c":503,"l":10866294,"d":933261},{"name":"ProduceResults","f":9864246,"d_finished":757557,"c":756,"l":10866590,"d":757557},{"a":10866595,"name":"Finish","f":10866595,"d_finished":0,"c":0,"l":10866914,"d":319},{"name":"task_result","f":9865097,"d_finished":398366,"c":252,"l":10865017,"d":398366}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.915156Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:290:2291];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:15.915622Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:290:2291];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.042},{"events":["l_task_result"],"t":1.002},{"events":["l_ProduceResults"],"t":1.003},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":1.004}],"full":{"a":9862592,"name":"_full_task","f":9862592,"d_finished":0,"c":0,"l":10867491,"d":1004899},"events":[{"name":"bootstrap","f":9862942,"d_finished":1876,"c":1,"l":9864818,"d":1876},{"a":10866364,"name":"ack","f":9905099,"d_finished":528032,"c":251,"l":10866277,"d":529159},{"a":10866344,"name":"processing","f":9865078,"d_finished":932691,"c":503,"l":10866294,"d":933838},{"name":"ProduceResults","f":9864246,"d_finished":757557,"c":756,"l":10866590,"d":757557},{"a":10866595,"name":"Finish","f":10866595,"d_finished":0,"c":0,"l":10867491,"d":896},{"name":"task_result","f":9865097,"d_finished":398366,"c":252,"l":10865017,"d":398366}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:15.915699Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:14.908346Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=1409432;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=1409432;selected_rows=0; 2026-01-07T17:36:15.981433Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:15.981725Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;; |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2026-01-07T17:36:14.322758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:14.350044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:14.350374Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:14.358258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:14.358505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:14.358737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:14.358865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:14.358967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:14.359115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:14.359250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:14.359367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:14.359495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:14.359604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.359763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:14.359882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:14.360019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:14.382909Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:14.383468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:14.383520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:14.383658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:14.383784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:14.383839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:14.383877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:14.383967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:14.384017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:14.384046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:14.384069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:14.384205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:14.384263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:14.384314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:14.384339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:14.384425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:14.384489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:14.384537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:36:14.384566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:14.384614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:14.384640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:14.384663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:14.384688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:14.384714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:14.384739Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:14.384905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:14.385025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:14.385054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:14.385185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:14.385231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.385274Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.385321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:14.385376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:14.385425Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:36:14.385474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:14.385515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:14.385547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:14.385664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:14.385719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... esource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:18.242728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:18.242944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-07T17:36:18.243098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.243220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.243340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.243502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:18.243632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.243800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.244165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-07T17:36:18.244630Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":4339678,"name":"_full_task","f":4339678,"d_finished":0,"c":0,"l":4351837,"d":12159},"events":[{"name":"bootstrap","f":4339913,"d_finished":1692,"c":1,"l":4341605,"d":1692},{"a":4351089,"name":"ack","f":4349835,"d_finished":1127,"c":1,"l":4350962,"d":1875},{"a":4351077,"name":"processing","f":4341769,"d_finished":3256,"c":3,"l":4350964,"d":4016},{"name":"ProduceResults","f":4341064,"d_finished":2222,"c":6,"l":4351440,"d":2222},{"a":4351444,"name":"Finish","f":4351444,"d_finished":0,"c":0,"l":4351837,"d":393},{"name":"task_result","f":4341783,"d_finished":2086,"c":2,"l":4349690,"d":2086}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.244698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:18.245095Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":4339678,"name":"_full_task","f":4339678,"d_finished":0,"c":0,"l":4352354,"d":12676},"events":[{"name":"bootstrap","f":4339913,"d_finished":1692,"c":1,"l":4341605,"d":1692},{"a":4351089,"name":"ack","f":4349835,"d_finished":1127,"c":1,"l":4350962,"d":2392},{"a":4351077,"name":"processing","f":4341769,"d_finished":3256,"c":3,"l":4350964,"d":4533},{"name":"ProduceResults","f":4341064,"d_finished":2222,"c":6,"l":4351440,"d":2222},{"a":4351444,"name":"Finish","f":4351444,"d_finished":0,"c":0,"l":4352354,"d":910},{"name":"task_result","f":4341783,"d_finished":2086,"c":2,"l":4349690,"d":2086}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:18.245192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:18.228241Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-07T17:36:18.245260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:18.245515Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] Test command err: 2026-01-07T17:36:03.703917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:03.731990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:03.732219Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:03.739407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-07T17:36:03.739646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2026-01-07T17:36:03.739778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.739925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:03.740057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:03.740151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:03.740212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:03.740305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:03.740382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:03.740442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:03.740553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:03.740709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:03.740859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:03.741000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:03.741118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:03.765725Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:03.766017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=14;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-07T17:36:03.766068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.766341Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-07T17:36:03.766436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.766499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.766531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.766755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2026-01-07T17:36:03.766834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2026-01-07T17:36:03.766895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2026-01-07T17:36:03.766969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2026-01-07T17:36:03.767035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:03.767087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-07T17:36:03.767125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-07T17:36:03.767226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:03.767336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:03.767374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:03.767412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-07T17:36:03.767489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:03.767538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:03.767573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:03.767599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:03.767738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:03.767807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:03.767840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:03.767860Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:03.767927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:03.767961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:03.767996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:03.768063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:03.768100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:03.768130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:03.768168Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:03.768224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:03.768251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:03.768274Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:03.768460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... ng;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:12.340820Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-07T17:36:12.340852Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:12.340900Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:12.341397Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:12.341517Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.341569Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:12.341661Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-07T17:36:12.341696Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-07T17:36:12.341841Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-07T17:36:12.342010Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.342215Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.342406Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.342560Z node 2 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:12.342694Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.342843Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.343138Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-07T17:36:12.343572Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.076},{"events":["l_task_result"],"t":2.203},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":2.205}],"full":{"a":7007770,"name":"_full_task","f":7007770,"d_finished":0,"c":0,"l":9213750,"d":2205980},"events":[{"name":"bootstrap","f":7008079,"d_finished":1687,"c":1,"l":7009766,"d":1687},{"a":9213104,"name":"ack","f":7083872,"d_finished":1192618,"c":421,"l":9213009,"d":1193264},{"a":9213091,"name":"processing","f":7009968,"d_finished":2067580,"c":843,"l":9213013,"d":2068239},{"name":"ProduceResults","f":7009243,"d_finished":1205738,"c":1266,"l":9213431,"d":1205738},{"a":9213438,"name":"Finish","f":9213438,"d_finished":0,"c":0,"l":9213750,"d":312},{"name":"task_result","f":7009991,"d_finished":863706,"c":422,"l":9211500,"d":863706}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.343633Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:12.344015Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.076},{"events":["l_task_result"],"t":2.203},{"events":["l_ProduceResults","f_Finish"],"t":2.205},{"events":["l_ack","l_processing","l_Finish"],"t":2.206}],"full":{"a":7007770,"name":"_full_task","f":7007770,"d_finished":0,"c":0,"l":9214230,"d":2206460},"events":[{"name":"bootstrap","f":7008079,"d_finished":1687,"c":1,"l":7009766,"d":1687},{"a":9213104,"name":"ack","f":7083872,"d_finished":1192618,"c":421,"l":9213009,"d":1193744},{"a":9213091,"name":"processing","f":7009968,"d_finished":2067580,"c":843,"l":9213013,"d":2068719},{"name":"ProduceResults","f":7009243,"d_finished":1205738,"c":1266,"l":9213431,"d":1205738},{"a":9213438,"name":"Finish","f":9213438,"d_finished":0,"c":0,"l":9214230,"d":792},{"name":"task_result","f":7009991,"d_finished":863706,"c":422,"l":9211500,"d":863706}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:12.344083Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:10.135536Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-07T17:36:12.344121Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:12.344277Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRenameWorksNewApi |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2026-01-07T17:36:11.585996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:11.610881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:11.611118Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:11.618280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:11.618526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:11.618732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:11.618848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:11.618942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:11.619081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:11.619198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:11.619307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:11.619422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:11.619518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:11.619644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:11.619753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:11.619856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:11.651171Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:11.651759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:11.651815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:11.652025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:11.652177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:11.652245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:11.652288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:11.652405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:11.652479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:11.652541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:11.652573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:11.652758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:11.652830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:11.652896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:11.652955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:11.653051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:11.653139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:11.653203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:11.653244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:11.653295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:11.653340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:11.653366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:11.653407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:11.653443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:11.653471Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:11.653715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:11.653881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:11.653917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:11.654059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:11.654103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:11.654139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:11.654208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:11.654275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:11.654317Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:11.654370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:11.654410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:11.654448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:11.654644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:11.654706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:17.196831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:17.197169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:36:17.197400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.197578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.197788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.198076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:17.198293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.198472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.198880Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-07T17:36:17.199434Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.032},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.035}],"full":{"a":5990845,"name":"_full_task","f":5990845,"d_finished":0,"c":0,"l":6026511,"d":35666},"events":[{"name":"bootstrap","f":5991163,"d_finished":1914,"c":1,"l":5993077,"d":1914},{"a":6025612,"name":"ack","f":6023646,"d_finished":1741,"c":1,"l":6025387,"d":2640},{"a":6025589,"name":"processing","f":5993263,"d_finished":4924,"c":3,"l":6025390,"d":5846},{"name":"ProduceResults","f":5992459,"d_finished":3081,"c":6,"l":6026066,"d":3081},{"a":6026071,"name":"Finish","f":6026071,"d_finished":0,"c":0,"l":6026511,"d":440},{"name":"task_result","f":5993285,"d_finished":3112,"c":2,"l":6023367,"d":3112}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.199542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:17.200099Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.032},{"events":["l_ProduceResults","f_Finish"],"t":0.035},{"events":["l_ack","l_processing","l_Finish"],"t":0.036}],"full":{"a":5990845,"name":"_full_task","f":5990845,"d_finished":0,"c":0,"l":6027167,"d":36322},"events":[{"name":"bootstrap","f":5991163,"d_finished":1914,"c":1,"l":5993077,"d":1914},{"a":6025612,"name":"ack","f":6023646,"d_finished":1741,"c":1,"l":6025387,"d":3296},{"a":6025589,"name":"processing","f":5993263,"d_finished":4924,"c":3,"l":6025390,"d":6502},{"name":"ProduceResults","f":5992459,"d_finished":3081,"c":6,"l":6026066,"d":3081},{"a":6026071,"name":"Finish","f":6026071,"d_finished":0,"c":0,"l":6027167,"d":1096},{"name":"task_result","f":5993285,"d_finished":3112,"c":2,"l":6023367,"d":3112}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:17.200462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:17.159254Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-07T17:36:17.200515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:17.200808Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] Test command err: 
2026-01-07T17:35:47.108424Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:35:47.139976Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:35:47.140293Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:35:47.141248Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:35:47.141671Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:35:47.142866Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:35:47.142932Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:35:47.143960Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:53:2077] ControllerId# 72057594037932033 2026-01-07T17:35:47.144004Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:35:47.144116Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:35:47.144274Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:35:47.158134Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:35:47.158229Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:35:47.160664Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.160859Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161036Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161184Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161307Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:65:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161445Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:66:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161584Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] 
Create Queue# [1:67:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.161620Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:35:47.161718Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:53:2077] 2026-01-07T17:35:47.161756Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:53:2077] 2026-01-07T17:35:47.161814Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:35:47.161861Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:35:47.162839Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:35:47.162922Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:35:47.165875Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:35:47.166155Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:35:47.166539Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:35:47.166790Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:35:47.167691Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:78:2076] ControllerId# 72057594037932033 2026-01-07T17:35:47.167724Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:35:47.167788Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:35:47.167912Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:35:47.177858Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:35:47.177926Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:35:47.180778Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:84:2080] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181015Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:85:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181156Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:86:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181297Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:87:2083] targetNodeId# 1 Marker# DSP01 
2026-01-07T17:35:47.181432Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:88:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181592Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:89:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181760Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:90:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:35:47.181789Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:35:47.181864Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:78:2076] 2026-01-07T17:35:47.181893Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:78:2076] 2026-01-07T17:35:47.181954Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:35:47.181994Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:35:47.182437Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:35:47.182668Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:35:47.191669Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:53:2077] 2026-01-07T17:35:47.191836Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:35:47.193683Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:35:47.193757Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:43:2064] 2026-01-07T17:35:47.193789Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:43:2064] 2026-01-07T17:35:47.194471Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:35:47.195005Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:78:2076] 2026-01-07T17:35:47.195062Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:35:47.195307Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:35:47.195382Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:35:47.195616Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:35:47.195796Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:97:2093] 
2026-01-07T17:35:47.195826Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:97:2093] 2026-01-07T17:35:47.195909Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:35:47.195935Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:35:47.196265Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:35:47.196456Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:35:47.196545Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:35:47.196619Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:97:2093] 2026-01-07T17:35:47.196685Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageC ... us# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:36:16.501329Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2026-01-07T17:36:16.501389Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:16.501913Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{18, redo 164b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2026-01-07T17:36:16.501974Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:16.502271Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:36:16.502355Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:16.502555Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:36:16.502652Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:16.503123Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [758c562ff377dcdd] received {EvVPutResult Status# OK ID# [72075186224037888:2:1:1:28672:89:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 80700 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2026-01-07T17:36:16.503237Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [758c562ff377dcdd] Result# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2026-01-07T17:36:16.503294Z node 28 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [758c562ff377dcdd] SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:36:16.503469Z node 28 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:36:16.503629Z node 28 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72075186224037888 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:36:16.503774Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:2} commited cookie 2 for step 1 2026-01-07T17:36:16.504101Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [5be63a1c46f51038] bootstrap ActorId# [28:548:2209] Group# 2147483648 TabletId# 72075186224037888 Channel# 0 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-07T17:36:16.504235Z node 28 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [28:491:2162] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:0] collect=[2:0] cookie# 0 2026-01-07T17:36:16.504335Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [e7148f1ebe2147a8] bootstrap ActorId# [28:549:2210] Group# 2147483649 TabletId# 72075186224037888 Channel# 1 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-07T17:36:16.504390Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:182: [e7148f1ebe2147a8] Keep# [72075186224037888:1:2:1:8192:289:0] Marker# DSPC04 2026-01-07T17:36:16.504496Z node 28 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [28:511:2179] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:1] collect=[2:0] Keep: [72075186224037888:1:2:1:8192:289:0] cookie# 0 2026-01-07T17:36:16.506361Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [5be63a1c46f51038] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2026-01-07T17:36:16.506471Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [5be63a1c46f51038] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2026-01-07T17:36:16.507529Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [e7148f1ebe2147a8] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2026-01-07T17:36:16.507595Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [e7148f1ebe2147a8] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2026-01-07T17:36:16.508027Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: 
TClient[72057594037927937] ::Bootstrap [27:550:2318] 2026-01-07T17:36:16.508094Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:550:2318] 2026-01-07T17:36:16.508213Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:36:16.508292Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-07T17:36:16.508374Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:550:2318] 2026-01-07T17:36:16.508437Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:550:2318] 2026-01-07T17:36:16.508500Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:550:2318] 2026-01-07T17:36:16.508575Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:550:2318] 2026-01-07T17:36:16.508715Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:550:2318] 2026-01-07T17:36:16.508856Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:550:2318] 2026-01-07T17:36:16.508921Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:550:2318] 2026-01-07T17:36:16.509005Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:550:2318] 2026-01-07T17:36:16.509106Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:550:2318] 2026-01-07T17:36:16.509161Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:550:2318] 2026-01-07T17:36:16.509248Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:463:2298] EventType# 268959750 2026-01-07T17:36:16.509504Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} queued, type NKikimr::NHive::TTxSyncTablets 2026-01-07T17:36:16.509591Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:16.509727Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:36:16.509824Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:16.510061Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:36:16.510176Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:16.510286Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, 
NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:36:16.510382Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:16.510803Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:553:2321] 2026-01-07T17:36:16.510865Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:553:2321] 2026-01-07T17:36:16.510975Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:36:16.511060Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-07T17:36:16.511140Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:553:2321] 2026-01-07T17:36:16.511211Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:553:2321] 2026-01-07T17:36:16.511300Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:553:2321] 2026-01-07T17:36:16.511385Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:553:2321] 2026-01-07T17:36:16.511547Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:553:2321] 2026-01-07T17:36:16.511718Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:553:2321] 2026-01-07T17:36:16.511797Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:553:2321] 2026-01-07T17:36:16.511854Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:553:2321] 2026-01-07T17:36:16.511941Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:553:2321] 2026-01-07T17:36:16.512017Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:553:2321] 2026-01-07T17:36:16.512101Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:552:2320] EventType# 268697616 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:23.904328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:24.044689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:24.053888Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:24.054542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:24.054616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:24.540084Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:24.701270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:24.701449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:24.747449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:24.892233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:25.645489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:25.646857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:25.646912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:25.646945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:25.647476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:25.720367Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:26.376884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:35:29.738804Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:29.746296Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-07T17:35:29.795876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:29.796005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:35:29.840570Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:35:29.843060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:30.051040Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:30.051161Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:30.052539Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.053432Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.053942Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.054965Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.055205Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.055318Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.055531Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.055757Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.055866Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:30.097328Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:30.517207Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:30.606454Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:35:30.606574Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:35:30.633715Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:35:30.635064Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:35:30.635293Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:35:30.635350Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:35:30.635401Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:35:30.635448Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:35:30.635502Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:35:30.635551Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:35:30.636235Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: 
[72075186224037894] Subscribed for config changes 2026-01-07T17:35:30.638004Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:1994:2455] 2026-01-07T17:35:30.642129Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-07T17:35:30.652398Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Describe result: PathErrorUnknown 2026-01-07T17:35:30.652472Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Creating table 2026-01-07T17:35:30.652566Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-07T17:35:30.692083Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2231:2598], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:30.692514Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:30.692604Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2233:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:30.745572Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2306:2653] 2026-01-07T17:35:30.745896Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2306:2653], schemeshard id = 72075186224037897 2026-01-07T17:35:30.769943Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2214:2589] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-07T17:35:30.780730Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-07T17:35:31.058338Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:35:31.212402Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-07T17:35:31.567512Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-07T17:35:31.569874Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2627:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:31.573625Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:31.583288Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:35:31.583868Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Subscribe on create table tx: 281474976720658 2026-01-07T17:35:31.605740Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2198:2580] Owner: [ ... d[ 22 ] 2026-01-07T17:36:03.314490Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:5783:3359], StatRequests.size() = 1 2026-01-07T17:36:04.148415Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:04.148971Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:36:04.149462Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:36:04.149545Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:36:04.253625Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:5824:3369]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:04.254102Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:36:04.254153Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:5824:3369], StatRequests.size() = 1 2026-01-07T17:36:05.241061Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:5869:3381]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:05.241482Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:36:05.241533Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:5869:3381], StatRequests.size() = 1 2026-01-07T17:36:06.206853Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:5912:3391]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:06.207246Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:36:06.207314Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5912:3391], StatRequests.size() = 1 
2026-01-07T17:36:07.127383Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:36:07.311524Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:36:07.311631Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:36:07.311675Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:36:07.311719Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:36:07.427933Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5959:3404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:07.428283Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:36:07.428323Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5959:3404], StatRequests.size() = 1 2026-01-07T17:36:08.515204Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6007:3416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:08.515554Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:36:08.515607Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6007:3416], StatRequests.size() = 1 2026-01-07T17:36:09.562100Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:09.562331Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2026-01-07T17:36:09.562427Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:36:09.562740Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:36:09.619210Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:36:09.619295Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:09.619532Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:09.635249Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:09.760979Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6050:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:09.761336Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:36:09.761386Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6050:3428], StatRequests.size() = 1 2026-01-07T17:36:10.839934Z node 3 :STATISTICS 
DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6096:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:10.840288Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:36:10.840333Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6096:3440], StatRequests.size() = 1 2026-01-07T17:36:11.418065Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-07T17:36:11.418134Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-07T17:36:11.418394Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:11.432364Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:36:12.135392Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6139:3450]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:12.135733Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:36:12.135773Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6139:3450], StatRequests.size() = 1 2026-01-07T17:36:13.067203Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:36:13.336845Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6182:3460]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:13.337195Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:36:13.337239Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6182:3460], StatRequests.size() = 1 2026-01-07T17:36:14.371724Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6227:3474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:14.371937Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:36:14.371992Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6227:3474], StatRequests.size() = 1 2026-01-07T17:36:15.222196Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:15.222532Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 7 2026-01-07T17:36:15.222736Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:36:15.223529Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:36:15.281710Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:36:15.281775Z node 3 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:15.282192Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:15.296896Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:15.369036Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [3:6272:3486]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:15.369414Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:36:15.369468Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [3:6272:3486], StatRequests.size() = 1 2026-01-07T17:36:15.370037Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6274:3394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:15.374571Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:36:15.374687Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6284:3398] 2026-01-07T17:36:15.374762Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6284:3398] 2026-01-07T17:36:15.375946Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6290:3399] 2026-01-07T17:36:15.376373Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6284:3398], server id = [2:6290:3399], tablet id = 72075186224038895, status = OK 2026-01-07T17:36:15.376452Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6290:3399], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:36:15.376535Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:36:15.376843Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:36:15.377155Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6274:3394], StatRequests.size() = 1 2026-01-07T17:36:15.377276Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] 
recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2117] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:91:2057] recipient: [36:88:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:90:2118] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:105:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:85:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:88:2057] recipient: [45:87:2117] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:90:2057] recipient: [45:87:2117] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:89:2118] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:205:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:85:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:88:2057] recipient: [46:87:2117] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:90:2057] recipient: [46:87:2117] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:89:2118] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:205:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:86:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:89:2057] recipient: [47:88:2117] Leader for TabletID 72057594037927937 is [47:90:2118] sender: [47:91:2057] recipient: [47:88:2117] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2026-01-07T17:36:13.635375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:13.663942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:13.664181Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:13.674807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:13.675009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:13.675201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:13.675306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:13.675379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:13.675450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:13.675509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:13.675572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:13.675643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:13.675722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:13.675814Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:13.675910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:13.675977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:13.702395Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:13.702973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:13.703019Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:13.703187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:13.703339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:13.703412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:13.703492Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:13.703572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:13.703620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:13.703649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:13.703670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:13.703817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:13.703868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:13.703908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:36:13.703932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:13.704012Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:13.704061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:13.704103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:13.704122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:13.704162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:13.704208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:13.704237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:13.704275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:13.704299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:13.704316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:13.704456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:13.704562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:13.704588Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:13.704704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:13.704751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:13.704782Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:13.704827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:13.704873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:13.704910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:13.705003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:13.705057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:13.705102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:13.705247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:13.705303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:36:14.420798Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-07T17:36:14.421171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:14.421227Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:14.421295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:14.421335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:14.421382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:36:14.432132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:14.432207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:14.432251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:14.432337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:14.432764Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 1 version: {1767807014615:max} readable: {1767807374615:max} at tablet 9437184 2026-01-07T17:36:14.444267Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2026-01-07T17:36:14.445656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:17;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 
2026-01-07T17:36:14.447801Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-07T17:36:14.447933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-07T17:36:14.449928Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6},{"from":8},{"from":10},{"from":12},{"from":14},{"from":16},{"from":18},{"from":20},{"from":22},{"from":24},{"from":26},{"from":28}]},{"owner_id":18,"inputs":[{"from":29}]},{"owner_id":2,"inputs":[{"from":29}]},{"owner_id":20,"inputs":[{"from":29}]},{"owner_id":4,"inputs":[{"from":29}]},{"owner_id":22,"inputs":[{"from":29}]},{"owner_id":6,"inputs":[{"from":29}]},{"owner_id":24,"inputs":[{"from":29}]},{"owner_id":8,"inputs":[{"from":29}]},{"owner_id":26,"inputs":[{"from":29}]},{"owner_id":10,"inputs":[{"from":29}]},{"owner_id":28,"inputs":[{"from":29}]},{"owner_id":29,"inputs":[{"from":30}]},{"owner_id":12,"inputs":[{"from":29}]},{"owner_id":30,"inputs":[]},{"owner_id":14,"inputs":[{"from":29}]},{"owner_id":16,"inputs":[{"from":29}]}],"nodes":{"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":33,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":33,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":33,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043","t":"Projection"},"w":462,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":33,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":33,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":33,"id":16},"24":{"p":{"i":"4294967041","p":{"address":{"name":"_yql_tx_id","id":4294967041}},"o":"4294967041","t":"AssembleOriginalData"},"w":33,"id":24},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":33,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":33,"id":10},"29":{"p":{"i":"0","p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestam
p","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"4294967040,4294967041,1,4294967042,2,4294967043,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":28,"id":29},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":33,"id":6},"30":{"p":{"p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":30},"22":{"p":{"i":"4294967040","p":{"address":{"name":"_yql_plan_step","id":4294967040}},"o":"4294967040","t":"AssembleOriginalData"},"w":33,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":33,"id":12},"28":{"p":{"i":"4294967043","p":{"address":{"name":"_yql_delete_flag","id":4294967043}},"o":"4294967043","t":"AssembleOriginalData"},"w":33,"id":28},"26":{"p":{"i":"4294967042","p":{"address":{"name":"_yql_write_id","id":4294967042}},"o":"4294967042","t":"AssembleOriginalData"},"w":33,"id":26}}}; 2026-01-07T17:36:14.451392Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1767807014615:max}. CS min read snapshot: {1767807074615:max}. 
now: 2026-01-07T17:36:14.451318Z; 2026-01-07T17:36:14.463097Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767807014615:max} readable: {1767807374615:max} at tablet 9437184 2026-01-07T17:36:14.474921Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:36:14.475123Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-07T17:36:14.475222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-07T17:36:14.475869Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2026-01-07T17:36:14.477581Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807014615:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1767807014615:max}. CS min read snapshot: {1767807074615:max}. now: 2026-01-07T17:36:14.477505Z; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:78:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:81:2057] recipient: [37:80:2112] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:83:2057] recipient: [37:80:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:82:2113] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:198:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:79:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:84:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:83:2113] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:199:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:82:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:85:2057] recipient: [39:84:2115] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:87:2057] recipient: [39:84:2115] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:86:2116] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:202:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2115] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:87:2057] recipient: [40:84:2115] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:86:2116] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:83:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:86:2057] recipient: [41:85:2115] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:88:2057] recipient: [41:85:2115] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:87:2116] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:203:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:86:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:89:2057] recipient: [42:88:2118] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:91:2057] recipient: [42:88:2118] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:90:2119] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:206:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:52:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:86:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:89:2057] recipient: [43:88:2118] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:91:2057] recipient: [43:88:2118] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:90:2119] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:206:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:87:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:90:2057] recipient: [44:89:2118] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:92:2057] recipient: [44:89:2118] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:91:2119] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:207:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2026-01-07T17:36:16.929502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:16.959329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:16.959569Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:16.967480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:16.967753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:16.967994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:16.968114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:16.968216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:16.968311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:16.968437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:16.968586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:16.968706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:16.968814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:16.969280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:16.969451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:16.969566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:16.994696Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:16.995332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:16.995392Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:16.995573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:16.995722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:16.995778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:16.995814Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:16.995892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:16.995943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:16.995980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:16.996024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:16.996167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:16.996240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:16.996290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:16.996319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:16.996425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:16.996491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:16.996569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:16.996599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:16.996658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:16.996718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:16.996758Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:16.996813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:16.996851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:16.996895Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:16.997138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:16.997253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:16.997286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:16.997409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:16.997451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:16.997480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:16.997527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:16.997567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:16.997602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:16.997665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:16.997702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:16.997745Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:16.997919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:16.997999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-07T17:36:21.445923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> Normalizers::RemoveWriteIdNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2026-01-07T17:36:02.360853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.383660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.383867Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:02.389938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.390190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.390360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.390443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.390511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.390579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.390695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.390809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.390879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.390940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.391012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.391102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.391184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.410766Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.411109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.411160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.411306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.411446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.411515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.411553Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.411634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.411695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.411736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.411759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.411881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.411926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.411951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.411975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.412038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.412074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.412102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:02.412122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.412164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.412201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.412230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:02.412276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.412310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.412330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.412476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.412551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.412573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.412659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.412686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.412704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.412734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.412760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.412786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:02.412814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.412836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.412857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.413017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:02.413067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... RING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:21.799072Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-07T17:36:21.799132Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:21.799183Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:21.799886Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:21.800100Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.800146Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:21.800299Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-07T17:36:21.800361Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-07T17:36:21.800555Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:320:2321];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-07T17:36:21.800726Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.800871Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.801087Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.801277Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:21.801392Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.801510Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.801886Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:322:2322] finished for tablet 9437184 2026-01-07T17:36:21.802512Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:320:2321];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.056},{"events":["l_task_result"],"t":1.693},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.695}],"full":{"a":18213381,"name":"_full_task","f":18213381,"d_finished":0,"c":0,"l":19909198,"d":1695817},"events":[{"name":"bootstrap","f":18213635,"d_finished":1315,"c":1,"l":18214950,"d":1315},{"a":19908447,"name":"ack","f":18270142,"d_finished":820524,"c":421,"l":19908337,"d":821275},{"a":19908432,"name":"processing","f":18215174,"d_finished":1570439,"c":843,"l":19908341,"d":1571205},{"name":"ProduceResults","f":18214495,"d_finished":1126372,"c":1266,"l":19908755,"d":1126372},{"a":19908768,"name":"Finish","f":19908768,"d_finished":0,"c":0,"l":19909198,"d":430},{"name":"task_result","f":18215190,"d_finished":739058,"c":422,"l":19906424,"d":739058}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.802633Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:320:2321];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:21.803164Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:320:2321];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.056},{"events":["l_task_result"],"t":1.693},{"events":["l_ProduceResults","f_Finish"],"t":1.695},{"events":["l_ack","l_processing","l_Finish"],"t":1.696}],"full":{"a":18213381,"name":"_full_task","f":18213381,"d_finished":0,"c":0,"l":19909915,"d":1696534},"events":[{"name":"bootstrap","f":18213635,"d_finished":1315,"c":1,"l":18214950,"d":1315},{"a":19908447,"name":"ack","f":18270142,"d_finished":820524,"c":421,"l":19908337,"d":821992},{"a":19908432,"name":"processing","f":18215174,"d_finished":1570439,"c":843,"l":19908341,"d":1571922},{"name":"ProduceResults","f":18214495,"d_finished":1126372,"c":1266,"l":19908755,"d":1126372},{"a":19908768,"name":"Finish","f":19908768,"d_finished":0,"c":0,"l":19909915,"d":1147},{"name":"task_result","f":18215190,"d_finished":739058,"c":422,"l":19906424,"d":739058}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:21.803263Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:20.104717Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-07T17:36:21.803313Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:21.803485Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> Normalizers::CleanUnusedTablesNormalizer [GOOD] >> TPQTestSlow::TestOnDiskStoredSourceIds >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest >> BasicStatistics::DedicatedTimeIntervals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: 
[1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:137:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:136:2159] sender: [1:138:2058] recipient: [1:114:2145] 2026-01-07T17:33:26.148158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:33:26.148234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.148268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:33:26.148301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:33:26.148332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:33:26.148359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:33:26.148449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:33:26.148519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:33:26.149305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:33:26.149560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:33:26.271800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:33:26.271869Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:33:26.272671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:186:2058] recipient: [1:15:2062] 2026-01-07T17:33:26.288063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:33:26.288292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-01-07T17:33:26.288445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:33:26.295359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:33:26.295570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:33:26.296209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:33:26.296429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:33:26.298466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.298629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:33:26.299695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:33:26.299766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:33:26.299937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:33:26.299981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:33:26.300036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:33:26.300130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:33:26.451345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.452946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.453007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.453098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:33:26.453192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... in: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1345 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-07T17:36:18.684166Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-07T17:36:18.684321Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1345 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-07T17:36:18.684445Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1345 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-07T17:36:18.685140Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 842 RawX2: 420906797819 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:36:18.685184Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-07T17:36:18.685303Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 842 RawX2: 420906797819 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:36:18.685363Z node 98 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:36:18.685451Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 842 RawX2: 420906797819 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-07T17:36:18.685513Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:36:18.685556Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:36:18.685600Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:36:18.685646Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:36:18.685682Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-07T17:36:18.688542Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:36:18.688734Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:36:18.689112Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:36:18.689175Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2026-01-07T17:36:18.689231Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-07T17:36:18.689279Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2026-01-07T17:36:18.689352Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-07T17:36:18.689395Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 240 -> 240 2026-01-07T17:36:18.694889Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-07T17:36:18.694963Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 
2026-01-07T17:36:18.695075Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:36:18.695111Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:36:18.695160Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-07T17:36:18.695192Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:36:18.695232Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-07T17:36:18.695278Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-07T17:36:18.695320Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-07T17:36:18.695357Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-07T17:36:18.695524Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:36:18.695564Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-07T17:36:18.699334Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-07T17:36:18.699395Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-07T17:36:18.699780Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-07T17:36:18.699873Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-07T17:36:18.699909Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [98:939:2899] TestWaitNotification: OK eventTxId 1003 2026-01-07T17:36:18.700353Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:36:18.700581Z node 98 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 248us result status StatusSuccess 2026-01-07T17:36:18.701094Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQTestSlow::TestWriteVeryBigMessage >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] Test command err: 2026-01-07T17:36:16.992562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:17.028401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:17.028643Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:17.036405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2026-01-07T17:36:17.036682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:17.036953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:17.037144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:17.037266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:17.037380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:17.037492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:17.037622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:17.037733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:17.037835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:17.037963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:17.038122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:17.038255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:17.038359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:17.070478Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:17.070890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2026-01-07T17:36:17.070952Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:17.071276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:17.071375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-07T17:36:17.071465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-07T17:36:17.071692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:17.071812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:17.071871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:17.071908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-07T17:36:17.072012Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:17.072075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:17.072121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:17.072154Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:17.072353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:17.072444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:17.072500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:17.072541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:17.072671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:17.072736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:17.072783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:17.072846Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:17.072918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:17.072985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:17.073020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:17.073064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:17.073111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:17.073138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:17.073398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:17.073444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:17.073473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:17.073671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:17.073726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:17.073761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 
2026-01-07T17:36:17.073837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:17.073893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:17.073927Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=n ... {{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:24.954472Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-07T17:36:24.954512Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:24.954548Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:24.955203Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:24.955351Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.955409Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:24.955545Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-07T17:36:24.955606Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-07T17:36:24.955805Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-07T17:36:24.955943Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.956096Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.956268Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.956422Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:24.956521Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.956613Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.957049Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-07T17:36:24.957660Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.059},{"events":["l_task_result"],"t":1.398},{"events":["l_ProduceResults","f_Finish"],"t":1.4},{"events":["l_ack","l_processing","l_Finish"],"t":1.401}],"full":{"a":7199943,"name":"_full_task","f":7199943,"d_finished":0,"c":0,"l":8601234,"d":1401291},"events":[{"name":"bootstrap","f":7200229,"d_finished":1576,"c":1,"l":7201805,"d":1576},{"a":8600514,"name":"ack","f":7259598,"d_finished":655843,"c":421,"l":8600425,"d":656563},{"a":8600502,"name":"processing","f":7201987,"d_finished":1275947,"c":843,"l":8600429,"d":1276679},{"name":"ProduceResults","f":7201340,"d_finished":920259,"c":1266,"l":8600756,"d":920259},{"a":8600763,"name":"Finish","f":8600763,"d_finished":0,"c":0,"l":8601234,"d":471},{"name":"task_result","f":7202007,"d_finished":608244,"c":422,"l":8598680,"d":608244}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.957756Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:24.958371Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.059},{"events":["l_task_result"],"t":1.398},{"events":["l_ProduceResults","f_Finish"],"t":1.4},{"events":["l_ack","l_processing","l_Finish"],"t":1.402}],"full":{"a":7199943,"name":"_full_task","f":7199943,"d_finished":0,"c":0,"l":8601956,"d":1402013},"events":[{"name":"bootstrap","f":7200229,"d_finished":1576,"c":1,"l":7201805,"d":1576},{"a":8600514,"name":"ack","f":7259598,"d_finished":655843,"c":421,"l":8600425,"d":657285},{"a":8600502,"name":"processing","f":7201987,"d_finished":1275947,"c":843,"l":8600429,"d":1277401},{"name":"ProduceResults","f":7201340,"d_finished":920259,"c":1266,"l":8600756,"d":920259},{"a":8600763,"name":"Finish","f":8600763,"d_finished":0,"c":0,"l":8601956,"d":1193},{"name":"task_result","f":7202007,"d_finished":608244,"c":422,"l":8598680,"d":608244}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:24.958467Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:23.554298Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-07T17:36:24.958520Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:24.958719Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:23.145941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:23.258693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:23.266470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:23.266597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:23.266714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:23.712461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:23.859304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:23.859408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:23.899480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:24.010618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:24.762566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:24.763706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:24.763762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:24.763804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:24.764270Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:24.829593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:25.427058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:35:28.821638Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:28.828643Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-07T17:35:28.832696Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:28.864834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:35:28.864987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:28.894612Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:35:28.902348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:29.115343Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.116117Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.117680Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.118522Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.118836Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.118970Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.119166Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.119403Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.119534Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:29.251930Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-07T17:35:29.307644Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:29.307772Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:29.321462Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:29.469977Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:29.519525Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:35:29.519637Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:35:29.556392Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:35:29.556678Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:35:29.556848Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:35:29.556895Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:35:29.556964Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:35:29.557014Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:35:29.557055Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:35:29.557093Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:35:29.557597Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:35:29.704988Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:29.705100Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2286:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:29.735108Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2316:2629] 2026-01-07T17:35:29.736196Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2316:2629], schemeshard id = 72075186224037897 2026-01-07T17:35:29.884898Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2430:2693] 2026-01-07T17:35:29.885457Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-07T17:35:29.888363Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Describe result: PathErrorUnknown 2026-01-07T17:35:29.888427Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Creating table 2026-01-07T17:35:29.888527Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-07T17:35:29.892433Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2449:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:29.897510Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:29.907089Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:35:29.907248Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Subscribe on create table tx: 281474976720657 2026-01-07T17:35:29.942241Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:35:30.198025Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:35:30.994761Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:35:31.125824Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:35:31.125937Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2432:2695] Owner: [3:2431:2694]. Column diff is empty, finishing 2026-01-07T17:35:31.832222Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWake ... 7T17:36:12.678123Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6085:3437], StatRequests.size() = 1 2026-01-07T17:36:14.132248Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6125:3447]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:14.132585Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:36:14.132627Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6125:3447], StatRequests.size() = 1 2026-01-07T17:36:14.221487Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:14.222126Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 10 2026-01-07T17:36:14.222612Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-07T17:36:14.222685Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-07T17:36:14.322763Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:36:14.322864Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:14.323193Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:14.337461Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:15.734712Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6173:3461]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:15.735073Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:36:15.735123Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6173:3461], StatRequests.size() = 1 2026-01-07T17:36:16.546486Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-07T17:36:16.546563Z 
node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-07T17:36:16.546890Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:16.562225Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:36:16.981828Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:36:17.331381Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6214:3471]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:17.331651Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:36:17.331684Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6214:3471], StatRequests.size() = 1 2026-01-07T17:36:18.671968Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6257:3481]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:18.672411Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:36:18.672461Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6257:3481], StatRequests.size() = 1 2026-01-07T17:36:18.706106Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:18.706409Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 11 2026-01-07T17:36:18.707212Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:36:18.707588Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-07T17:36:18.765714Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:36:18.765779Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:18.765990Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:18.781279Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:19.902590Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6304:3497]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:19.903138Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:36:19.903192Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6304:3497], StatRequests.size() = 1 2026-01-07T17:36:19.903811Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6306:3395]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-07T17:36:19.907238Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:36:19.907340Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6316:3399] 2026-01-07T17:36:19.907395Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6316:3399] 2026-01-07T17:36:19.908805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6316:3399], server id = [2:6323:3400], tablet id = 72075186224038895, status = OK 2026-01-07T17:36:19.908881Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6323:3400] 2026-01-07T17:36:19.909255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6323:3400], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:36:19.909329Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:36:19.909613Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:36:19.909704Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6306:3395], StatRequests.size() = 1 2026-01-07T17:36:19.909791Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:36:20.003018Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224038895] EvFastPropagateCheck 2026-01-07T17:36:20.003106Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:36:20.058956Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6316:3399], schemeshard count = 1 2026-01-07T17:36:20.524391Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:36:20.524622Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-07T17:36:20.525287Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 1 2026-01-07T17:36:20.547442Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-07T17:36:20.547548Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-07T17:36:20.547907Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:20.562763Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:36:20.862382Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:36:22.541003Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:36:22.541256Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 12 2026-01-07T17:36:22.541969Z 
node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:36:22.542337Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-07T17:36:22.609319Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:36:22.609405Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:22.609726Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:22.624209Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:22.849031Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224038895] EvPropagateTimeout 2026-01-07T17:36:24.422065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:36:24.422283Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 2 2026-01-07T17:36:24.422989Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 2 2026-01-07T17:36:24.459378Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-07T17:36:24.459459Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-07T17:36:24.459837Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:36:24.480596Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-07T17:36:24.796761Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> SlowTopicAutopartitioning::CDC_Write >> Backup::ProposeBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2026-01-07T17:36:04.219593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:04.244579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:04.244804Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:04.252476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-07T17:36:04.252736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:04.252997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:04.253188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:04.253296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:04.253403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:04.253580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:04.253694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:04.253797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:04.253889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:04.254013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:04.254165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:04.254288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:04.254393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:04.283270Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:04.283642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-07T17:36:04.283713Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:04.284013Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-07T17:36:04.284150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:04.284239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-07T17:36:04.284302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-07T17:36:04.284440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:04.284530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:04.284575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:04.284610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-07T17:36:04.284720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:04.284780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:04.284819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:04.284847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:04.285088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:04.285172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:04.285216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:04.285261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:04.285380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:04.285442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:04.285494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:04.285537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:04.285590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:04.285627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:04.285653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:04.285694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:04.285740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:04.285818Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:04.286017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:04.286056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:04.286105Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:04.286243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:04.286304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 
2026-01-07T17:36:04.286351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:04.286426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:04.286465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksM ... y=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:25.245208Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-07T17:36:25.245247Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:25.245286Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:25.245896Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:25.246100Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.246150Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:25.246313Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-07T17:36:25.246379Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-07T17:36:25.246601Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-07T17:36:25.246745Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.246883Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.247054Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.247204Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:25.247302Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.247395Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.247744Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-07T17:36:25.248336Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.061},{"events":["l_task_result"],"t":1.146},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.148}],"full":{"a":20405593,"name":"_full_task","f":20405593,"d_finished":0,"c":0,"l":21554242,"d":1148649},"events":[{"name":"bootstrap","f":20405907,"d_finished":1750,"c":1,"l":20407657,"d":1750},{"a":21553603,"name":"ack","f":20466687,"d_finished":544705,"c":421,"l":21553507,"d":545344},{"a":21553591,"name":"processing","f":20407870,"d_finished":1014783,"c":843,"l":21553511,"d":1015434},{"name":"ProduceResults","f":20407111,"d_finished":765895,"c":1266,"l":21553833,"d":765895},{"a":21553844,"name":"Finish","f":21553844,"d_finished":0,"c":0,"l":21554242,"d":398},{"name":"task_result","f":20407890,"d_finished":457491,"c":422,"l":21551727,"d":457491}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.248433Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:25.249035Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.061},{"events":["l_task_result"],"t":1.146},{"events":["l_ProduceResults","f_Finish"],"t":1.148},{"events":["l_ack","l_processing","l_Finish"],"t":1.149}],"full":{"a":20405593,"name":"_full_task","f":20405593,"d_finished":0,"c":0,"l":21554910,"d":1149317},"events":[{"name":"bootstrap","f":20405907,"d_finished":1750,"c":1,"l":20407657,"d":1750},{"a":21553603,"name":"ack","f":20466687,"d_finished":544705,"c":421,"l":21553507,"d":546012},{"a":21553591,"name":"processing","f":20407870,"d_finished":1014783,"c":843,"l":21553511,"d":1016102},{"name":"ProduceResults","f":20407111,"d_finished":765895,"c":1266,"l":21553833,"d":765895},{"a":21553844,"name":"Finish","f":21553844,"d_finished":0,"c":0,"l":21554910,"d":1066},{"name":"task_result","f":20407890,"d_finished":457491,"c":422,"l":21551727,"d":457491}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:25.249138Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:24.096550Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-07T17:36:25.249188Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:25.249373Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Secret::SimpleQueryService [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TColumnShardTestReadWrite::WriteReadNoCompression >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> 
Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:06.610325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:06.731534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:06.750765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:06.751416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:06.751485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:07.037260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:07.037440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:07.135788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807243697229 != 1767807243697233 2026-01-07T17:34:07.144977Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:07.190538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:07.281261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:07.822332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:07.823736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:07.823789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:07.823834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:07.824079Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:07.837717Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-07T17:34:19.663804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.663969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1027:2876], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.664057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.665144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2879], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.665263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:19.671501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:19.695307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1031:2880], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:34:19.727269Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1083:2913] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:19.995180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:21.149505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:22.672364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:23.745762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.585356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:25.146949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:26.835447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-07T17:34:27.445866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-07T17:34:31.518282Z node 1 :KQP_SLOW_LOG 
WARN: kqp_worker_common.cpp:132: TraceId: "01kecrc4zs0dykyvd14ghg1bte", SessionId: ydb://session/3?node_id=1&id=YWFlYWUwMGYtMTJlM2E3MTEtZjkwMjFhNWUtY2U5ZjgwMg==, Slow query, duration: 11.862785s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`", parameters: 0b REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2026-01-07T17:34:32.041959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:34:32.042025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:35:31.815638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.168983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:34.852252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:35:35.380019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 E0107 17:36:29.693076419 141578 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-07T17:36:29.692831333+00:00"} |86.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> Secret::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 192 112 28 48 32 24 16 24 56 |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099 ... 57] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:142:2057] recipient: [56:141:2158] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:144:2057] recipient: [56:141:2158] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! new actor is[56:143:2159] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:259:2057] recipient: [56:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] !Reboot 72057594037927937 (actor [57:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:140:2057] recipient: [57:39:2086] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:143:2057] recipient: [57:142:2158] Leader for TabletID 72057594037927937 is [57:144:2159] sender: [57:145:2057] recipient: [57:142:2158] !Reboot 72057594037927937 (actor [57:58:2099]) rebooted! !Reboot 72057594037927937 (actor [57:58:2099]) tablet resolver refreshed! 
new actor is[57:144:2159] Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:59:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:76:2057] recipient: [58:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:81:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:84:2057] recipient: [63:83:2114] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:86:2057] recipient: [63:83:2114] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:85:2115] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:201:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:81:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:84:2057] recipient: [64:83:2114] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:86:2057] recipient: [64:83:2114] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:85:2115] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:201:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:82:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:85:2057] recipient: [65:84:2114] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:87:2057] recipient: [65:84:2114] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! 
new actor is[65:86:2115] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:202:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:85:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:88:2057] recipient: [66:87:2117] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:90:2057] recipient: [66:87:2117] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:89:2118] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:205:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:52:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:52:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:205:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:86:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:89:2057] recipient: [68:88:2117] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:91:2057] recipient: [68:88:2117] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:90:2118] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:206:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:34:03.595055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:03.816844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:03.842956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:03.843987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:03.844053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:04.430276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:04.430440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:04.540564Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807240891050 != 1767807240891054 2026-01-07T17:34:04.553803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:04.622301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:04.758185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:05.528754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:05.530107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:05.530153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:05.530195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:05.530373Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:05.544062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-07T17:34:18.272785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1013:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.273023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.273621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.273706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.309841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:18.619410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1132:2947], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.619525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.620064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1136:2951], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.620158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.620234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2954], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:18.625927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:18.752705Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1141:2956], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:34:19.751375Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1233:3021] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:21.127954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:22.528566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:23.395627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.144336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:24.627840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:26.820740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-07T17:34:27.431011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2026-01-07T17:34:32.090222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:34:32.090299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT 
OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:35:32.074953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.672427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715753:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:35.840612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715766:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:35:36.431374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715769:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: 
SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |86.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TColumnShardTestReadWrite::WriteReadModifications |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2026-01-07T17:36:30.006247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.042287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.042523Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.050300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.050538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.050798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.050953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.051073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.051181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2026-01-07T17:36:30.051303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.051429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.051562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.051701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.051817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.051923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.052028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.089731Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.090192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.090268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.090473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.090645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.090720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.090759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.090868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.090930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2026-01-07T17:36:30.090968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.090996Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.091176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.091248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.091292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.091326Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.091413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.091464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.091533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.091565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.091622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.091657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.091690Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.091726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.091766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.091791Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.092013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.092058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.092091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.092193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.092234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.092261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.092334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.092378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.092419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.092462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.092499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.092531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.092661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.092701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
, mediator 0 2026-01-07T17:36:32.798900Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2026-01-07T17:36:32.798959Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1767807393044 last planned step 1767807393044 at tablet 9437184 2026-01-07T17:36:32.812976Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-07T17:36:32.813053Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-07T17:36:32.813182Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2026-01-07T17:36:32.813577Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767807393044:max} readable: {1767807393044:max} at tablet 9437184 2026-01-07T17:36:32.813721Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:36:32.813985Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807393044:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-07T17:36:32.814076Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807393044:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-07T17:36:32.814899Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807393044:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2026-01-07T17:36:32.816797Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807393044:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:36:32.824651Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767807393044:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[2:184:2196];trace_detailed=; 2026-01-07T17:36:32.825484Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2;column_names=field,key;);; 2026-01-07T17:36:32.825897Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2026-01-07T17:36:32.826272Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.826433Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.826669Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:32.826838Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.827002Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.827212Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:184:2196] finished for tablet 9437184 2026-01-07T17:36:32.827739Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:183:2195];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3448907,"name":"_full_task","f":3448907,"d_finished":0,"c":0,"l":3451643,"d":2736},"events":[{"name":"bootstrap","f":3449165,"d_finished":1688,"c":1,"l":3450853,"d":1688},{"a":3451010,"name":"ack","f":3451010,"d_finished":0,"c":0,"l":3451643,"d":633},{"a":3450986,"name":"processing","f":3450986,"d_finished":0,"c":0,"l":3451643,"d":657},{"name":"ProduceResults","f":3450457,"d_finished":720,"c":2,"l":3451405,"d":720},{"a":3451410,"name":"Finish","f":3451410,"d_finished":0,"c":0,"l":3451643,"d":233}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.827824Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:183:2195];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:32.828256Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:183:2195];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3448907,"name":"_full_task","f":3448907,"d_finished":0,"c":0,"l":3452244,"d":3337},"events":[{"name":"bootstrap","f":3449165,"d_finished":1688,"c":1,"l":3450853,"d":1688},{"a":3451010,"name":"ack","f":3451010,"d_finished":0,"c":0,"l":3452244,"d":1234},{"a":3450986,"name":"processing","f":3450986,"d_finished":0,"c":0,"l":3452244,"d":1258},{"name":"ProduceResults","f":3450457,"d_finished":720,"c":2,"l":3451405,"d":720},{"a":3451410,"name":"Finish","f":3451410,"d_finished":0,"c":0,"l":3452244,"d":834}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:32.828367Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:32.816762Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:36:32.828421Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:32.828574Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-07T17:36:31.253304Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: 
No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-07T17:36:31.343675Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:31.343781Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:31.343871Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:31.343974Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:181:2057] recipient: [1:14:2061] 2026-01-07T17:36:31.369567Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-07T17:36:31.369719Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:36:31.397853Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-07T17:36:31.398149Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:31.399705Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-07T17:36:31.399919Z node 1 :PERSQUEUE DEBUG: 
partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:36:31.400045Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:36:31.400689Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:36:31.402900Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:187:2142] 2026-01-07T17:36:31.405825Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:36:31.405911Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-07T17:36:31.406028Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:187:2142] 2026-01-07T17:36:31.406092Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:36:31.407758Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:36:31.409286Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:36:31.409353Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:36:31.409401Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:31.409495Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:36:31.409575Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:31.409660Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:31.409752Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:36:31.409805Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:36:31.409850Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:36:31.409914Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:36:31.409973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:36:31.410345Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 
ReadingTimestamp 0 rrg 0 2026-01-07T17:36:31.410414Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-07T17:36:31.410499Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:36:31.410721Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:36:31.410995Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:188:2142] 2026-01-07T17:36:31.413370Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:36:31.413427Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-07T17:36:31.413465Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:188:2142] 2026-01-07T17:36:31.413512Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:36:31.414728Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:36:31.416072Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-07T17:36:31.416122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:36:31.416160Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:31.416205Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:36:31.416242Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:31.416280Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:31.416346Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-07T17:36:31.416395Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:36:31.416428Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:36:31.416458Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:36:31.416506Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:36:31.416709Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:36:31.416746Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-07T17:36:31.416811Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:36:31.417143Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:31.417379Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:36:31.417586Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:31.417713Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... ot data from cache. Partition 0 offset 6 partno 0 count 1 parts_count 10 source 1 size 5243650 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-07T17:36:36.018212Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 17. All 2 blobs are from cache. 2026-01-07T17:36:36.018282Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' 2026-01-07T17:36:36.018322Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 6 partno 0 count 1 parts 10 suffix '0' 2026-01-07T17:36:36.018414Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-07T17:36:36.018535Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018615Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018651Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018679Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018709Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 1 size 635356 from pos 0 cbcount 2 2026-01-07T17:36:36.018738Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018767Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018795Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018822Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018850Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018877Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018905Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018933Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018964Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.018992Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 1 size 123358 from pos 0 cbcount 1 2026-01-07T17:36:36.019056Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019087Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019117Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019143Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019169Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 
cbcount 1 2026-01-07T17:36:36.019196Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019222Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019252Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019276Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019303Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:36.019329Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 1 size 123358 from pos 0 cbcount 1 2026-01-07T17:36:36.019450Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-07T17:36:36.019482Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-07T17:36:36.019518Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 4:6 isTruncatedBlob 0 2026-01-07T17:36:36.025073Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 4 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 1 2026-01-07T17:36:36.044572Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 5 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:36:36.045667Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-07T17:36:36.046761Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00000_0000000001_00016 2026-01-07T17:36:36.046865Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000001_00006_0000000002_00014 2026-01-07T17:36:36.046943Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-07T17:36:36.047175Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-07T17:36:36.047238Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:36.047359Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 283 2026-01-07T17:36:36.047448Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 6 count 2 size 271 2026-01-07T17:36:36.047484Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00016(+) to d0000000000_00000000000000000000_00000_0000000001_00016(+) 2026-01-07T17:36:36.047514Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000001_00006_0000000002_00014(+) to d0000000000_00000000000000000001_00006_0000000002_00014(+) 2026-01-07T17:36:36.056730Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 0 count 1 actorID [3:138:2142] 2026-01-07T17:36:36.056815Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 1 count 2 actorID [3:138:2142] 2026-01-07T17:36:36.056856Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 1 size 3072032 actorID [3:138:2142] is actual 1 2026-01-07T17:36:36.056948Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 1 size 283 actorID [3:138:2142] 2026-01-07T17:36:36.056987Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 4 count 2 size 7415109 actorID [3:138:2142] is actual 1 2026-01-07T17:36:36.057044Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 2 size 271 actorID [3:138:2142] 2026-01-07T17:36:36.057145Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-07T17:36:36.057190Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 1 partno 6 count 2 parts 14 suffix '0' size 271 2026-01-07T17:36:36.057232Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 3072032 2026-01-07T17:36:36.064093Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 7415109 2026-01-07T17:36:36.071681Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-07T17:36:36.071776Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 271 2026-01-07T17:36:36.072169Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:36:36.072202Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-07T17:36:36.072231Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response Write 3 done Got compacter offset = -1 2026-01-07T17:36:36.076100Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:313:2300], now have 1 active actors on pipe 2026-01-07T17:36:36.076156Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:36:36.076184Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:36:36.076296Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 6 for user __ydb_compaction_consumer 2026-01-07T17:36:36.076584Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:315:2302], now have 1 active actors on pipe Got start offset = 3 |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> Normalizers::InsertedPortionsCleanerNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-07T17:36:27.061727Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-07T17:36:27.150854Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:27.150963Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:27.151059Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:27.151149Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] 
recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:181:2057] recipient: [1:14:2061] 2026-01-07T17:36:27.187124Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-07T17:36:27.187294Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:36:27.217086Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-07T17:36:27.217271Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:27.218386Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-07T17:36:27.218591Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:36:27.218683Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:36:27.219261Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:36:27.219732Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:187:2142] 2026-01-07T17:36:27.222564Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:36:27.222640Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-07T17:36:27.222685Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:187:2142] 2026-01-07T17:36:27.222753Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:36:27.224285Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:36:27.225773Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:36:27.225834Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:36:27.226000Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:27.226075Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:36:27.226149Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:27.226219Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:27.226304Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:36:27.226346Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:36:27.226383Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:36:27.226416Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:36:27.226459Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:36:27.226694Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:36:27.226734Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-07T17:36:27.226814Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:36:27.227021Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:36:27.227272Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:188:2142] 2026-01-07T17:36:27.228376Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:36:27.228406Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-07T17:36:27.228428Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:188:2142] 2026-01-07T17:36:27.228454Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:36:27.229217Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:36:27.230214Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-07T17:36:27.230255Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:36:27.230282Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:27.230319Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:36:27.230352Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:27.230380Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:36:27.230444Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-07T17:36:27.230495Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:36:27.230527Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:36:27.230551Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:36:27.230610Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:36:27.230779Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:36:27.230813Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-07T17:36:27.230870Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:36:27.231104Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:27.231321Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:36:27.231549Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:27.231731Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-07T17:36:37.676395Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676431Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676460Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676489Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676518Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676549Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676579Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676608Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.676640Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-07T17:36:37.676811Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-07T17:36:37.676892Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-07T17:36:37.676945Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:0 isTruncatedBlob 1 2026-01-07T17:36:37.704475Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:0 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-07T17:36:37.704595Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 3:16 2026-01-07T17:36:37.704962Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: 
[72057594037927937][Partition][0][StateIdle] read cookie 28 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 3 partno 16 count 4294967295 size 4294967295 endOffset 4 max time lag 0ms effective offset 3 2026-01-07T17:36:37.705363Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 28 added 2 blobs, size 12781161 count 1 last offset 3, current partition end offset: 4 2026-01-07T17:36:37.705404Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 28. Send blob request. 2026-01-07T17:36:37.705503Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 16 count 0 parts_count 16 source 0 size 8191635 accessed 1 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-07T17:36:37.705547Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 32 count 1 parts_count 8 source 1 size 4589526 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-07T17:36:37.705593Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 28. All 2 blobs are from cache. 2026-01-07T17:36:37.705679Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2026-01-07T17:36:37.705723Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2026-01-07T17:36:37.705797Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-07T17:36:37.705943Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-07T17:36:37.706085Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706117Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706148Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706179Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706213Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706243Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706273Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706301Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-07T17:36:37.706329Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-07T17:36:37.706448Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 
0: Got internal ProxyResponse 2026-01-07T17:36:37.706484Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-07T17:36:37.706520Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:16 isTruncatedBlob 1 2026-01-07T17:36:37.727273Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:16 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 1 2026-01-07T17:36:37.759272Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00032_0000000001_00015 2026-01-07T17:36:37.759430Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-07T17:36:37.759699Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-07T17:36:37.759759Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:36:37.759876Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 0 size 271 2026-01-07T17:36:37.759966Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 32 count 1 size 187 2026-01-07T17:36:37.760051Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 0 size 8191590 2026-01-07T17:36:37.760087Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000002_00016_0000000000_00016(+) to d0000000000_00000000000000000002_00016_0000000000_00016(+) 2026-01-07T17:36:37.760115Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00032_0000000001_00015(+) to d0000000000_00000000000000000000_00032_0000000001_00015(+) 2026-01-07T17:36:37.792248Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 0 size 271 actorID [3:138:2142] 2026-01-07T17:36:37.792324Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 2 count 1 size 4589526 actorID [3:138:2142] is actual 1 2026-01-07T17:36:37.792410Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 187 actorID [3:138:2142] 2026-01-07T17:36:37.792446Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 0 size 8191635 actorID [3:138:2142] is actual 1 2026-01-07T17:36:37.792519Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 0 size 8191590 actorID [3:138:2142] 2026-01-07T17:36:37.792663Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 4589526 2026-01-07T17:36:37.794371Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191635 2026-01-07T17:36:37.804135Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 0 parts 16 suffix '0' size 271 2026-01-07T17:36:37.804237Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 187 2026-01-07T17:36:37.804283Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191590 2026-01-07T17:36:37.804702Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:36:37.804749Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-07T17:36:37.804789Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-07T17:36:37.827312Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:401:2377], now have 1 active actors on pipe 2026-01-07T17:36:37.827416Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:36:37.827460Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:36:37.827570Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 3 for user __ydb_compaction_consumer 2026-01-07T17:36:37.827895Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:403:2379], now have 1 active actors on pipe Got start offset = 2 |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2026-01-07T17:36:35.594016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:35.633767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:35.634058Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:35.642763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:35.643035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:35.643275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:35.643398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:35.643511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:35.643619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:35.643752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:35.643887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:35.644014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:35.644133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:35.644259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:35.644385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:35.644506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:35.673476Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:35.674159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:35.674271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:35.674474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:35.674635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:35.674712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:35.674759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:35.674903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:35.674982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:35.675030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:35.675065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:35.675267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:35.675346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:35.675396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:35.675429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:35.675530Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:35.675595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:35.675663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:35.675721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:35.675777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:35.675816Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:35.675848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:35.675911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:35.675960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:35.675991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:35.676234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:35.676355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:35.676388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:35.676518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:35.676567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:35.676599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:35.676644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:35.676689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:35.676731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:35.676804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:35.676846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:35.676879Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:35.677152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:35.677228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-07T17:36:37.261355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-07T17:36:37.261405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=2;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=2;prepared=1; 2026-01-07T17:36:37.261574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=3;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{1,13},{2,14},{3,15},];}};]};SF:0;PR:0;); 2026-01-07T17:36:37.261621Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2026-01-07T17:36:37.261656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:37.261698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:37.261787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-07T17:36:37.261823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-07T17:36:37.261882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=1;prepared=1; 2026-01-07T17:36:37.261916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=1; 2026-01-07T17:36:37.261954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=2; 2026-01-07T17:36:37.262010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=3; 2026-01-07T17:36:37.262246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.262475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.262783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:37.263011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.263227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.263759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:458:2470] finished for tablet 9437184 2026-01-07T17:36:37.264376Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:454:2466];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.021},{"events":["f_ack","l_ProduceResults","f_Finish"],"t":0.022},{"events":["l_ack","l_processing","l_Finish"],"t":0.023}],"full":{"a":2187859,"name":"_full_task","f":2187859,"d_finished":0,"c":0,"l":2211083,"d":23224},"events":[{"name":"bootstrap","f":2188215,"d_finished":1667,"c":1,"l":2189882,"d":1667},{"a":2209981,"name":"ack","f":2209981,"d_finished":0,"c":0,"l":2211083,"d":1102},{"a":2209955,"name":"processing","f":2190126,"d_finished":10586,"c":5,"l":2209792,"d":11714},{"name":"ProduceResults","f":2189305,"d_finished":2336,"c":7,"l":2210532,"d":2336},{"a":2210540,"name":"Finish","f":2210540,"d_finished":0,"c":0,"l":2211083,"d":543},{"name":"task_result","f":2190148,"d_finished":10482,"c":5,"l":2209788,"d":10482}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.264475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:454:2466];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:37.265032Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:454:2466];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.021},{"events":["f_ack","l_ProduceResults","f_Finish"],"t":0.022},{"events":["l_ack","l_processing","l_Finish"],"t":0.023}],"full":{"a":2187859,"name":"_full_task","f":2187859,"d_finished":0,"c":0,"l":2211782,"d":23923},"events":[{"name":"bootstrap","f":2188215,"d_finished":1667,"c":1,"l":2189882,"d":1667},{"a":2209981,"name":"ack","f":2209981,"d_finished":0,"c":0,"l":2211782,"d":1801},{"a":2209955,"name":"processing","f":2190126,"d_finished":10586,"c":5,"l":2209792,"d":12413},{"name":"ProduceResults","f":2189305,"d_finished":2336,"c":7,"l":2210532,"d":2336},{"a":2210540,"name":"Finish","f":2210540,"d_finished":0,"c":0,"l":2211782,"d":1242},{"name":"task_result","f":2190148,"d_finished":10482,"c":5,"l":2209788,"d":10482}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:37.265154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:37.237935Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2026-01-07T17:36:37.265220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:37.265449Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2026-01-07T17:36:30.183135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.219068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.219302Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.228755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.229033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.229235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.229345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.229441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.229593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.229700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.229815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.229937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.230037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.230163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.230279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.230394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.268161Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.268909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.268984Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.269246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.269661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.269771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.269828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.269960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.270036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.270084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.270118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.270304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.270386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.270439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.270468Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.270564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.270638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.270695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:36:30.270730Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.270786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.270824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.270853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.270904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.270945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.270973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.271188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.271291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.271331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.271462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.271505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.271560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.271651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.271722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.271775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:36:30.271838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.271880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.271916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.272065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.272144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:36.762473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:36.762738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:36:36.762907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.763027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.763181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.763366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:36.763509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.763697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.764165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-07T17:36:36.764859Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":7099556,"name":"_full_task","f":7099556,"d_finished":0,"c":0,"l":7113123,"d":13567},"events":[{"name":"bootstrap","f":7099859,"d_finished":1557,"c":1,"l":7101416,"d":1557},{"a":7112217,"name":"ack","f":7110812,"d_finished":1264,"c":1,"l":7112076,"d":2170},{"a":7112206,"name":"processing","f":7101669,"d_finished":4018,"c":3,"l":7112079,"d":4935},{"name":"ProduceResults","f":7100831,"d_finished":2479,"c":6,"l":7112603,"d":2479},{"a":7112610,"name":"Finish","f":7112610,"d_finished":0,"c":0,"l":7113123,"d":513},{"name":"task_result","f":7101691,"d_finished":2694,"c":2,"l":7110616,"d":2694}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.764991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:36.765576Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":7099556,"name":"_full_task","f":7099556,"d_finished":0,"c":0,"l":7113930,"d":14374},"events":[{"name":"bootstrap","f":7099859,"d_finished":1557,"c":1,"l":7101416,"d":1557},{"a":7112217,"name":"ack","f":7110812,"d_finished":1264,"c":1,"l":7112076,"d":2977},{"a":7112206,"name":"processing","f":7101669,"d_finished":4018,"c":3,"l":7112079,"d":5742},{"name":"ProduceResults","f":7100831,"d_finished":2479,"c":6,"l":7112603,"d":2479},{"a":7112610,"name":"Finish","f":7112610,"d_finished":0,"c":0,"l":7113930,"d":1320},{"name":"task_result","f":7101691,"d_finished":2694,"c":2,"l":7110616,"d":2694}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:36.765673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:36.747579Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:36:36.765725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:36.766022Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::InsertedPortionsCleanerNormalizer [GOOD] >> Normalizers::EmptyTablesNormalizer ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2026-01-07T17:36:30.066718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.099627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.099870Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.108375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.108708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.108962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.109087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.109193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.109293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.109429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.109593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.109716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.109831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.109986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.110218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 
2026-01-07T17:36:30.110337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.142872Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.143529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.143596Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.143783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.143982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.144055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.144100Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.144219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.144290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.144357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.144394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.144589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.144654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.144701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.144733Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.144829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.144893Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.144992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.145035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.145110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.145180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.145223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.145268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.145334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.145365Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.145595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.145718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.145761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.145911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.145956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.145987Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.146035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.146079Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.146120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.146178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.146220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.146281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.146429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.146488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count
_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"st
arts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"
}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-07T17:36:37.165484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2026-01-07T17:36:02.157234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.186435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.186668Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2026-01-07T17:36:02.193206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.193434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.193634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.193740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.193817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.193932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.194034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.194146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.194257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.194352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.194471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.194576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.194670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.224621Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.225383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.225443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.225749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.225938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.226017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.226061Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.226182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.226255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.226296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.226326Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.226525Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.226603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.226647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.226678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.226793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.226872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.226954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:36:02.226983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.227031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.227069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.227097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:02.227142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.227178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.227203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.227440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.227582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.227618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.227802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.227873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.227918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.227992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.228040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.228078Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:36:02.228135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.228176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.228205Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.228335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:02.228416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-07T17:36:36.487509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=871; 2026-01-07T17:36:36.487566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=58951; 2026-01-07T17:36:36.487607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=59086; 2026-01-07T17:36:36.487669Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-07T17:36:36.488008Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=295; 2026-01-07T17:36:36.488049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=59947; 2026-01-07T17:36:36.488214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2026-01-07T17:36:36.488357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2026-01-07T17:36:36.488736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=328; 2026-01-07T17:36:36.489118Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=332; 2026-01-07T17:36:36.505547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16354; 2026-01-07T17:36:36.521492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15820; 2026-01-07T17:36:36.521600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-07T17:36:36.521650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:36:36.521689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:36:36.521758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2026-01-07T17:36:36.521813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:36:36.521915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2026-01-07T17:36:36.521953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:36:36.522026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2026-01-07T17:36:36.522123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2026-01-07T17:36:36.522206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=51; 2026-01-07T17:36:36.522242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=102633; 2026-01-07T17:36:36.522394Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:36.522498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:36.522549Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:36.522620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:36.522694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:36.522890Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:36.522950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:36.522987Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:36.523046Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:36.523111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807064616;tx_id=18446744073709551615;;current_snapshot_ts=1767807363628; 2026-01-07T17:36:36.523150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:36.523191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:36.523225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:36.523314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:36.523505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.021000s; 2026-01-07T17:36:36.528469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:36.529125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:36.529192Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:36.529299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:36.529351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:36.529441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807064616;tx_id=18446744073709551615;;current_snapshot_ts=1767807363628; 2026-01-07T17:36:36.529500Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:36.529551Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:36.529596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:36.529679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:36:36.529733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:36.530852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.066000s; 2026-01-07T17:36:36.530904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TKeyValueTest::TestRenameWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2026-01-07T17:36:01.817522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:01.843657Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:01.843825Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:01.849883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2026-01-07T17:36:01.850109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:01.850277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:01.850365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:01.850453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:01.850532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:01.850635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:01.850692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:01.850774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:01.850867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.850995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:01.851056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:01.851110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:01.872361Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:01.873046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:01.873132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2026-01-07T17:36:01.873351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.873521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:01.873596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:01.873642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:01.873756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:01.873837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:01.873887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:01.873920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:01.874128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.874239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:01.874300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:01.874333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:01.874423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:01.874497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:01.874563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:01.874594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:01.874656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:01.874699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:01.874727Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:01.874770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:01.874809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:01.874836Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:01.875097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:01.875239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:01.875284Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:01.875436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:01.875494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.875528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.875592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:01.875644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:01.875684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:01.875748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:01.875792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:01.875826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:01.876014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:01.876102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:36:37.619800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=948; 2026-01-07T17:36:37.619862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=76192; 2026-01-07T17:36:37.619930Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=76351; 2026-01-07T17:36:37.620010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:36:37.620443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=378; 2026-01-07T17:36:37.620494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=77452; 2026-01-07T17:36:37.620699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2026-01-07T17:36:37.620835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=83; 2026-01-07T17:36:37.621376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=464; 2026-01-07T17:36:37.621715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=282; 2026-01-07T17:36:37.636315Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14526; 2026-01-07T17:36:37.651585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15147; 2026-01-07T17:36:37.651706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-07T17:36:37.651770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:36:37.651811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:36:37.651920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=69; 2026-01-07T17:36:37.651986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:36:37.652090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2026-01-07T17:36:37.652138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:36:37.652213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2026-01-07T17:36:37.652304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2026-01-07T17:36:37.652415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=62; 2026-01-07T17:36:37.652457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=120141; 2026-01-07T17:36:37.652603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:37.652712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:37.652768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:37.652832Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:37.652878Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:37.653074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:37.653137Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:37.653179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:37.653225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:37.653287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807064272;tx_id=18446744073709551615;;current_snapshot_ts=1767807363296; 2026-01-07T17:36:37.653331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:37.653374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:37.653410Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:37.653501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:37.653690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.150000s; 2026-01-07T17:36:37.659160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:37.659708Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:37.659774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:37.659863Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:37.659933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:37.660012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807064272;tx_id=18446744073709551615;;current_snapshot_ts=1767807363296; 2026-01-07T17:36:37.660074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:37.660135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:37.660180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:37.660275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:36:37.660331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:37.661445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.197000s; 2026-01-07T17:36:37.661499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |86.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 
!Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13 ... recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:82:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:85:2057] recipient: [63:84:2115] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:87:2057] recipient: [63:84:2115] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:86:2116] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:202:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:82:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:85:2057] recipient: [64:84:2115] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:87:2057] recipient: [64:84:2115] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:86:2116] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:202:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:83:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:86:2057] recipient: [65:85:2115] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:88:2057] recipient: [65:85:2115] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:87:2116] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:203:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:84:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:87:2057] recipient: [66:86:2116] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:89:2057] recipient: [66:86:2116] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:88:2117] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:108:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:52:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:52:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! 
new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:109:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:88:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:91:2057] recipient: [68:90:2120] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:93:2057] recipient: [68:90:2120] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! new actor is[68:92:2121] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:208:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] !Reboot 72057594037927937 (actor [69:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:88:2057] recipient: [69:39:2086] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:91:2057] recipient: [69:90:2120] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:93:2057] recipient: [69:90:2120] !Reboot 72057594037927937 (actor [69:58:2099]) rebooted! !Reboot 72057594037927937 (actor [69:58:2099]) tablet resolver refreshed! new actor is[69:92:2121] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:208:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:59:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:76:2057] recipient: [70:14:2061] !Reboot 72057594037927937 (actor [70:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:89:2057] recipient: [70:39:2086] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:92:2057] recipient: [70:91:2120] Leader for TabletID 72057594037927937 is [70:93:2121] sender: [70:94:2057] recipient: [70:91:2120] !Reboot 72057594037927937 (actor [70:58:2099]) rebooted! !Reboot 72057594037927937 (actor [70:58:2099]) tablet resolver refreshed! 
new actor is[70:93:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:59:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:76:2057] recipient: [71:14:2061] |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:52:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:82:2113] Leader for TabletID 72057594037927937 is [45:82:2113] sender: [45:198:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:78:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:81:2057] recipient: [46:80:2112] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:83:2057] recipient: [46:80:2112] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:82:2113] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:198:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:79:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:82:2057] recipient: [47:81:2112] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:84:2057] recipient: [47:81:2112] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:83:2113] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:199:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:82:2057] recipient: [48:39:2086] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:85:2057] recipient: [48:84:2115] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:87:2057] recipient: [48:84:2115] !Reboot 72057594037927937 (actor [48:58:2099]) rebooted! !Reboot 72057594037927937 (actor [48:58:2099]) tablet resolver refreshed! new actor is[48:86:2116] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:202:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:59:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:76:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:82:2057] recipient: [49:39:2086] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:85:2057] recipient: [49:84:2115] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:87:2057] recipient: [49:84:2115] !Reboot 72057594037927937 (actor [49:58:2099]) rebooted! !Reboot 72057594037927937 (actor [49:58:2099]) tablet resolver refreshed! new actor is[49:86:2116] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:202:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:59:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:76:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:83:2057] recipient: [50:39:2086] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:86:2057] recipient: [50:85:2115] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:88:2057] recipient: [50:85:2115] !Reboot 72057594037927937 (actor [50:58:2099]) rebooted! !Reboot 72057594037927937 (actor [50:58:2099]) tablet resolver refreshed! new actor is[50:87:2116] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:203:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:59:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:76:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:86:2057] recipient: [51:39:2086] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:89:2057] recipient: [51:88:2118] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:91:2057] recipient: [51:88:2118] !Reboot 72057594037927937 (actor [51:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [51:58:2099]) tablet resolver refreshed! new actor is[51:90:2119] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:206:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:59:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:76:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:86:2057] recipient: [52:39:2086] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:89:2057] recipient: [52:88:2118] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:91:2057] recipient: [52:88:2118] !Reboot 72057594037927937 (actor [52:58:2099]) rebooted! !Reboot 72057594037927937 (actor [52:58:2099]) tablet resolver refreshed! new actor is[52:90:2119] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:206:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:59:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:76:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:87:2057] recipient: [53:39:2086] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:90:2057] recipient: [53:89:2118] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:92:2057] recipient: [53:89:2118] !Reboot 72057594037927937 (actor [53:58:2099]) rebooted! !Reboot 72057594037927937 (actor [53:58:2099]) tablet resolver refreshed! new actor is[53:91:2119] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:207:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:59:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:76:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:90:2057] recipient: [54:39:2086] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:93:2057] recipient: [54:92:2121] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:95:2057] recipient: [54:92:2121] !Reboot 72057594037927937 (actor [54:58:2099]) rebooted! !Reboot 72057594037927937 (actor [54:58:2099]) tablet resolver refreshed! 
new actor is[54:94:2122] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:210:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:52:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:59:2057] recipient: [55:52:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:76:2057] recipient: [55:14:2061] !Reboot 72057594037927937 (actor [55:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:90:2057] recipient: [55:39:2086] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:93:2057] recipient: [55:92:2121] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:95:2057] recipient: [55:92:2121] !Reboot 72057594037927937 (actor [55:58:2099]) rebooted! !Reboot 72057594037927937 (actor [55:58:2099]) tablet resolver refreshed! new actor is[55:94:2122] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:210:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:59:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:76:2057] recipient: [56:14:2061] !Reboot 72057594037927937 (actor [56:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:91:2057] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:94:2057] recipient: [56:93:2121] Leader for TabletID 72057594037927937 is [56:95:2122] sender: [56:96:2057] recipient: [56:93:2121] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! 
new actor is[56:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2026-01-07T17:36:04.982336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:05.014249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:05.014474Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:05.021814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:05.022056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:05.022285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:05.022407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:05.022513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:05.022637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:05.022751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:05.022861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:05.022955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:05.023070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.023224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:05.023324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:05.023426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:05.052857Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:05.053495Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:05.053558Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:05.053720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.053879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:05.053951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:05.054002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:05.054124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:05.054192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:05.054232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:05.054258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:05.054426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:05.054481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:05.054529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:05.054563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:05.054656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:05.054717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:05.054778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:05.054807Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:05.054854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:05.054887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:05.054918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:05.054960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:05.054994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:05.055024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:05.055226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:05.055353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:05.055398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:05.055525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:05.055577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.055619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:05.055670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:05.055712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:05.055750Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:05.055798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:05.055831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:05.055861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:05.055984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:05.056059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:36:39.075568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=871; 2026-01-07T17:36:39.075623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=76103; 2026-01-07T17:36:39.075668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=76211; 2026-01-07T17:36:39.075732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-07T17:36:39.076104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=324; 2026-01-07T17:36:39.076152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=77099; 2026-01-07T17:36:39.076306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=103; 2026-01-07T17:36:39.076424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=71; 2026-01-07T17:36:39.076863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=395; 2026-01-07T17:36:39.077310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=392; 2026-01-07T17:36:39.097259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19864; 2026-01-07T17:36:39.114993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17605; 2026-01-07T17:36:39.115108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-07T17:36:39.115163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:36:39.115205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-07T17:36:39.115280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2026-01-07T17:36:39.115323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:36:39.115405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2026-01-07T17:36:39.115450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:36:39.115510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-07T17:36:39.115596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2026-01-07T17:36:39.115675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-07T17:36:39.115712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=125271; 2026-01-07T17:36:39.115836Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:39.115942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:39.115995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:39.116055Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:39.116094Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:39.116271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:39.116331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:39.116366Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-07T17:36:39.116407Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:39.116466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807067393;tx_id=18446744073709551615;;current_snapshot_ts=1767807366458; 2026-01-07T17:36:39.116503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:39.116546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:39.116582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:39.116660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:39.116838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.023000s; 2026-01-07T17:36:39.119984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:39.121365Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:39.121437Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:39.121510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:39.121558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:39.121620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807067393;tx_id=18446744073709551615;;current_snapshot_ts=1767807366458; 2026-01-07T17:36:39.121666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:39.121711Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:39.121751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:39.121824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:36:39.121906Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:39.122692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.194000s; 2026-01-07T17:36:39.122741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] |86.6%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::WriteReadStandalone >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium |86.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2026-01-07T17:35:55.933159Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674776524904010:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:55.935055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:55.981013Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:35:55.983683Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592674777191064671:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:55.983774Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:55.995877Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:35:56.173153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:56.192275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:56.340321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:56.351683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:56.353131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:56.354557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:56.354620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:56.406779Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:56.406948Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:56.415345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:56.628340Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:56.637418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:56.669882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:56.828908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001771/r3tmp/yandexK0qRU1.tmp 2026-01-07T17:35:56.828950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001771/r3tmp/yandexK0qRU1.tmp 2026-01-07T17:35:56.829125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001771/r3tmp/yandexK0qRU1.tmp 2026-01-07T17:35:56.829248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:56.861792Z INFO: TTestServer started on Port 2457 GrpcPort 2842 2026-01-07T17:35:56.942508Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:56.993424Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:2842 2026-01-07T17:35:57.054123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:57.140535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:59.468211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674793704774645:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:59.468332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:59.468557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674793704774662:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:59.468576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674793704774661:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:59.468617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:35:59.473357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:35:59.495732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674793704774665:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:35:59.680701Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674793704774752:3108] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:35:59.706746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:59.797810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:59.816509Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592674793704774762:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:35:59.818866Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmUwZTlhMi05ZjBiYzZhYi0xYjMzMWM3NS1jYjkxZDdkZA==, ActorId: [1:7592674793704774638:2334], ActorState: ExecuteState, LegacyTraceId: 01kecrf6f0a5panpshc5sb7g6j, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:35:59.820634Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:35:59.905795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592674797999742439:3392] 2026-01-07T17:36:00.932659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674776524904010:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:00.932748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:36:00.981252Z node 2 :METADATA_PROVIDER ERR ... 
ers: [] Groups: [] } }] } 2026-01-07T17:36:42.701622Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7592674926105396866:2121], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:36:42.701820Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7592674926105396866:2121], cacheItem# { Subscriber: { Subscriber: [7:7592674947580234772:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767807395809 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:36:42.701899Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7592674926105396866:2121], cacheItem# { Subscriber: { Subscriber: [7:7592674947580234628:3058] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767807395550 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:36:42.702424Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592674977645007481:4376], recipient# [7:7592674977645007480:2519], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:36:42.706136Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# 
[7:7592674926105396866:2121], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:36:42.706262Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7592674926105396866:2121], cacheItem# { Subscriber: { Subscriber: [7:7592674947580234628:3058] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767807395550 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:36:42.706395Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7592674926105396866:2121], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-01-07T17:36:42.706495Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7592674926105396866:2121], cacheItem# { Subscriber: { Subscriber: [7:7592674947580234772:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767807395809 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:36:42.706693Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592674977645007489:4378], recipient# [7:7592674977645007487:2527], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-01-07T17:36:42.706855Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592674977645007488:4377], recipient# [7:7592674977645007486:2528], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:36:42.721382Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:36:42.721441Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721461Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.721484Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721504Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:36:42.721578Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:36:42.721592Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721603Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.721617Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721629Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-07T17:36:42.721657Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:36:42.721669Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721679Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.721694Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.721702Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-07T17:36:42.821456Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:36:42.821507Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821527Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.821550Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821570Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:36:42.821646Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:36:42.821658Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821668Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.821699Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821711Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-07T17:36:42.821741Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:36:42.821764Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821774Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:36:42.821788Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:36:42.821810Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> TColumnShardTestReadWrite::Write [GOOD] |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2026-01-07T17:36:37.739322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:37.778407Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:37.778647Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:37.786263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertedPortions; 2026-01-07T17:36:37.786532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:37.786780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:37.786999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:37.787112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:37.787221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:37.787396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:37.787551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:37.787676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:37.787775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:37.787913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:37.788057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:37.788192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:37.788317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:37.820607Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:37.820875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanInsertedPortions; 2026-01-07T17:36:37.820919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-07T17:36:37.821202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2026-01-07T17:36:37.821320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2026-01-07T17:36:37.821383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2026-01-07T17:36:37.821444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2026-01-07T17:36:37.821551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertedPortions;id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:37.821605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-07T17:36:37.821635Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-07T17:36:37.821753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:37.821822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:37.821878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:37.821897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-07T17:36:37.821962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:37.822005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:37.822038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:37.822074Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:37.822251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:37.822327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:37.822383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:37.822424Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:37.822548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:37.822620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:37.822657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:37.822683Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:37.822733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:37.822773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:37.822799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:37.822840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:37.822984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:37.823021Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:37.823234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:37.823297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:37.823337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:37.823470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:37.823512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 
2026-01-07T17:36:37.823540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cp ... =common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=19; 2026-01-07T17:36:43.118298Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=92; 2026-01-07T17:36:43.118422Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=65; 2026-01-07T17:36:43.118518Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=43; 2026-01-07T17:36:43.118598Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=36; 2026-01-07T17:36:43.118663Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-07T17:36:43.118716Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=9; 2026-01-07T17:36:43.118760Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=576; 2026-01-07T17:36:43.118895Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=64; 2026-01-07T17:36:43.119007Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=58; 2026-01-07T17:36:43.119115Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=71; 2026-01-07T17:36:43.119194Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=50; 2026-01-07T17:36:43.119346Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=120; 2026-01-07T17:36:43.130241Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10826; 2026-01-07T17:36:43.130372Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2026-01-07T17:36:43.130436Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2026-01-07T17:36:43.130474Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:36:43.130588Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=70; 2026-01-07T17:36:43.130630Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:36:43.130723Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2026-01-07T17:36:43.130774Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-07T17:36:43.130856Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2026-01-07T17:36:43.130991Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=81; 2026-01-07T17:36:43.131069Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=38; 2026-01-07T17:36:43.131112Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20256; 2026-01-07T17:36:43.131265Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:43.131400Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:43.131477Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:43.131562Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:43.131614Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:43.131675Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:43.131762Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 
2026-01-07T17:36:43.131880Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:43.131957Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:43.132011Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:43.132109Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:43.155247Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2026-01-07T17:36:43.155867Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:43.155986Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:43.156050Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:43.156152Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:36:43.156239Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:43.156312Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:43.156364Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:43.156479Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:43.156679Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.064000s; 2026-01-07T17:36:43.156731Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:36:43.284821Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 111 scanId: 0 version: {1767807401899:111} readable: {1767807401899:max} at tablet 9437184 2026-01-07T17:36:43.285038Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2026-01-07T17:36:43.285119Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1767807401899:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:36:43.285237Z node 2 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1767807401899:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |86.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |86.6%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |86.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TColumnShardTestReadWrite::WriteOverload+InStore >> TColumnShardTestReadWrite::ReadWithProgram >> TColumnShardTestReadWrite::WriteOverload-InStore >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TColumnShardTestReadWrite::CompactionGC >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:32:36.486443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:36.650156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:32:36.671975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:32:36.672510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:36.672578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:32:37.105430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:37.105571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:37.177620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807153764334 != 1767807153764338 2026-01-07T17:32:37.192086Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:37.238215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:37.349022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:37.550152Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:37.827668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:37.996692Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:32:37.996786Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:32:37.997543Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-07T17:32:38.281300Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:32:38.281429Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:32:38.282200Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:32:38.282302Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:32:38.282706Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:32:38.282887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:32:38.283036Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:32:38.283353Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:32:38.285343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.286719Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:32:38.286827Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:32:38.352594Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:32:38.354704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:32:38.355085Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:889:2769] 2026-01-07T17:32:38.355464Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:32:38.420070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:32:38.420906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:32:38.422260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:32:38.424253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:32:38.424361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:32:38.424440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:32:38.424858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:32:38.425055Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:32:38.425163Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-07T17:32:38.436482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:32:38.506155Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:32:38.506410Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:32:38.506562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-07T17:32:38.506609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:32:38.506668Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:32:38.506759Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:32:38.507019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:889:2769], Recipient [1:889:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:32:38.507076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:32:38.507380Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:32:38.507478Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:32:38.507593Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:32:38.507650Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:32:38.507707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:32:38.507744Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:32:38.507798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:32:38.507847Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:32:38.507905Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:32:38.508452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2773], Recipient [1:889:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:32:38.508515Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:32:38.508592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:888:2768], serverId# [1:896:2773], sessionId# [0:0:0] 2026-01-07T17:32:38.508682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:896:2773] 
2026-01-07T17:32:38.508857Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:32:38.509051Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:32:38.509369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:32:38.509455Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:32:38.509659Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transacti ... ed with status OK role: Leader [27:1173:2971] 2026-01-07T17:36:44.296993Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [27:1173:2971] 2026-01-07T17:36:44.297171Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1173:2971] 2026-01-07T17:36:44.297334Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72075186224037888] shutdown pipe due to pending shutdown request [27:1173:2971] 2026-01-07T17:36:44.297450Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [27:1173:2971] 2026-01-07T17:36:44.297828Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [27:1172:2970], Recipient [27:918:2782]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-07T17:36:44.298006Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2026-01-07T17:36:44.298162Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:44.298357Z node 27 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2026-01-07T17:36:44.298587Z node 27 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [27:1172:2970], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2026-01-07T17:36:44.298773Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:36:44.298942Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:44.299370Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2026-01-07T17:36:44.299495Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:16} starting compaction 2026-01-07T17:36:44.300026Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} starting Scan{1 on 1001, Compact{72075186224037888.1.16, eph 1}} 2026-01-07T17:36:44.300219Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} started compaction 1 2026-01-07T17:36:44.300340Z node 27 :TABLET_EXECUTOR 
DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 3825467961049470949 2026-01-07T17:36:44.304754Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 16, product {tx status + 1 parts epoch 2} done 2026-01-07T17:36:44.305252Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2026-01-07T17:36:44.305438Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2026-01-07T17:36:44.305541Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2026-01-07T17:36:44.306183Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.540524Z 2026-01-07T17:36:44.306405Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2026-01-07T17:36:44.306568Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:44.306718Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-07T17:36:44.306866Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [27:1172:2970]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:36:44.307870Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2026-01-07T17:36:44.308033Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 11910707001711436910 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 11484734231071408091 ========= Starting an immediate read ========= 2026-01-07T17:36:44.594012Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037888] send [27:1107:2916] 2026-01-07T17:36:44.594160Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1107:2916] 2026-01-07T17:36:44.594639Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [27:1197:2978], Recipient [27:918:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-07T17:36:44.594949Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-07T17:36:44.595127Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:44.595387Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:36:44.595543Z node 27 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:36:44.595682Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2502/18446744073709551615 2026-01-07T17:36:44.595865Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-07T17:36:44.596131Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:36:44.596248Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:36:44.596465Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:36:44.596647Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:36:44.596756Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-07T17:36:44.596870Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:36:44.596916Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:36:44.596968Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:36:44.596997Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:36:44.597245Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:36:44.597641Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is DelayComplete 2026-01-07T17:36:44.597719Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:36:44.597846Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:36:44.597930Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:36:44.597998Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:36:44.598027Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:36:44.598077Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-07T17:36:44.598189Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:36:44.598379Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:36:44.598544Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:44.729374Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-07T17:36:44.729580Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:36:44.729956Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{16, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-07T17:36:44.730126Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:36:44.731132Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2026-01-07T17:36:44.731544Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [27:536:2479] 2026-01-07T17:36:44.731666Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [27:536:2479] |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |86.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |86.7%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2026-01-07T17:36:38.601309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:38.630261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:38.630502Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:38.637861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:38.638115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:38.638349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:38.638481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:38.638586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:38.638679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:38.638803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:38.638932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:38.639037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:38.639168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:38.639304Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:38.639424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:38.639528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:38.675301Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:38.676140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:38.676201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:38.676388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:38.676582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:38.676655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:38.676712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:38.676830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:38.676899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:38.676967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:38.677002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:38.677191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:38.677254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:38.677296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:36:38.677327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:38.677416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:38.677513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:38.677577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:38.677604Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:38.677661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:38.677712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:38.677741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:38.677796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:38.677845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:38.677875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:38.678208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:38.678325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:38.678361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:38.678487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:38.678535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:38.678562Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:38.678608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:38.678651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:38.678687Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:38.678745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:38.678813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:38.678861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:38.679003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:38.679062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-07T17:36:45.334273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |86.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn >> DataShardSnapshots::MvccSnapshotAndSplit |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |86.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2026-01-07T17:36:40.196347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:40.226789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:40.226981Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:40.234261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:40.234478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:40.234671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:40.234782Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:40.234844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:40.234903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:40.235017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:40.235164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:40.235316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:40.235410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:40.235530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:40.235660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:40.235784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:40.262255Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:40.263669Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:40.263735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:40.263920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:40.264091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:40.264168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:40.264222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:40.264330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:40.264400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:40.264445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:40.264476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:40.264655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:40.264720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:40.264772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:40.264804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:40.264894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:40.264983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:40.265044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:40.265075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:40.265125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:40.265170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:40.265210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:40.265267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:40.265304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:40.265337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:40.265578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:40.265688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:40.265728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:40.265912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:40.265958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:40.265989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:40.266041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:40.266089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:40.266127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:40.266180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:40.266219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:40.266251Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:40.266398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:40.266456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk
":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]}
,"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_
include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-07T17:36:46.391613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::MvccSnapshotTailCleanup >> TColumnShardTestReadWrite::ReadWithProgramLike |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2026-01-07T17:36:44.710140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:44.745813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:44.746051Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:44.754751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:44.755039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:44.755293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:44.755422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:44.755542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:44.755685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:44.755802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:44.755904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:44.756017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:44.756127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:44.756252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:44.756382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:44.756450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:44.806665Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:44.809376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:44.809460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:44.809662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:44.809851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:44.809917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:44.809964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:44.810083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:44.810169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:44.810213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:44.810241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:44.810418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:44.810520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:44.810580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:44.810613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:44.810727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:44.810809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:44.810866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:44.810896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:44.810949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:44.810997Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:44.811030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:44.811073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:44.811116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:44.811151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:44.811720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:44.811859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:44.811896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:44.812024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:44.812065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:44.812108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:44.812159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:44.812212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:44.812270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:44.812323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:44.812367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:44.812402Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:44.812534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:44.812593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:36:49.109663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:36:49.110002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:36:49.110202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.110365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.110532Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.110783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:49.110983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.111142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.111521Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-07T17:36:49.112074Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":5044053,"name":"_full_task","f":5044053,"d_finished":0,"c":0,"l":5059647,"d":15594},"events":[{"name":"bootstrap","f":5044356,"d_finished":1777,"c":1,"l":5046133,"d":1777},{"a":5058814,"name":"ack","f":5057083,"d_finished":1537,"c":1,"l":5058620,"d":2370},{"a":5058801,"name":"processing","f":5046311,"d_finished":4446,"c":3,"l":5058623,"d":5292},{"name":"ProduceResults","f":5045605,"d_finished":2710,"c":6,"l":5059224,"d":2710},{"a":5059230,"name":"Finish","f":5059230,"d_finished":0,"c":0,"l":5059647,"d":417},{"name":"task_result","f":5046331,"d_finished":2847,"c":2,"l":5056854,"d":2847}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.112160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:49.112661Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":5044053,"name":"_full_task","f":5044053,"d_finished":0,"c":0,"l":5060264,"d":16211},"events":[{"name":"bootstrap","f":5044356,"d_finished":1777,"c":1,"l":5046133,"d":1777},{"a":5058814,"name":"ack","f":5057083,"d_finished":1537,"c":1,"l":5058620,"d":2987},{"a":5058801,"name":"processing","f":5046311,"d_finished":4446,"c":3,"l":5058623,"d":5909},{"name":"ProduceResults","f":5045605,"d_finished":2710,"c":6,"l":5059224,"d":2710},{"a":5059230,"name":"Finish","f":5059230,"d_finished":0,"c":0,"l":5060264,"d":1034},{"name":"task_result","f":5046331,"d_finished":2847,"c":2,"l":5056854,"d":2847}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:49.112751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:49.092276Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-07T17:36:49.112799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:49.113109Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> DataShardSnapshots::VolatileSnapshotSplit >> 
Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2026-01-07T17:36:49.051090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:49.081550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:49.081790Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:49.089255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:49.089496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:49.089730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:49.089862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:49.090003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:49.090109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:49.090200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:49.090320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:49.090435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:49.090679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.090816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-07T17:36:49.090949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:49.091089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:49.119639Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:49.120344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:49.120427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:49.120628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.120790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:49.120858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:49.120903Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:49.121046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:49.121143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:49.121208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:49.121239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:49.121395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.121461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:49.121507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:49.121544Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:49.121650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:49.121725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:49.121796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:49.121826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:49.121884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:49.121939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:49.121989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:49.122039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:49.122079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:49.122117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:49.122751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:49.122884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:49.122921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:49.123053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:49.123095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.123125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.123178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:49.123224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:49.123270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:49.123331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:49.123368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:49.123398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:49.123593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:49.123659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;fline=source.cpp:346;source_idx=0; 2026-01-07T17:36:50.365744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=source.cpp:346;source_idx=0; 2026-01-07T17:36:50.365934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-07T17:36:50.365970Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-07T17:36:50.366041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-07T17:36:50.366194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2026-01-07T17:36:50.366253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-07T17:36:50.366285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-07T17:36:50.366434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-07T17:36:50.366508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Calculation; 2026-01-07T17:36:50.366755Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Filter; 2026-01-07T17:36:50.366829Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=4; 2026-01-07T17:36:50.366926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:25;event=empty_result;scan_step_idx=5; 2026-01-07T17:36:50.367037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-07T17:36:50.367069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-07T17:36:50.367115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2026-01-07T17:36:50.367159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2026-01-07T17:36:50.367309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.367487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.367624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:50.367758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.367853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.368078Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: actor.cpp:457: Scan [1:352:2364] finished for tablet 9437184 2026-01-07T17:36:50.368496Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:351:2363];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":2380408,"name":"_full_task","f":2380408,"d_finished":0,"c":0,"l":2386459,"d":6051},"events":[{"name":"bootstrap","f":2380693,"d_finished":1180,"c":1,"l":2381873,"d":1180},{"a":2385929,"name":"ack","f":2385929,"d_finished":0,"c":0,"l":2386459,"d":530},{"a":2385920,"name":"processing","f":2381987,"d_finished":2194,"c":2,"l":2385829,"d":2733},{"name":"ProduceResults","f":2381457,"d_finished":1070,"c":4,"l":2386188,"d":1070},{"a":2386193,"name":"Finish","f":2386193,"d_finished":0,"c":0,"l":2386459,"d":266},{"name":"task_result","f":2382001,"d_finished":2163,"c":2,"l":2385827,"d":2163}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.368568Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:351:2363];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:50.368886Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:351:2363];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":2380408,"name":"_full_task","f":2380408,"d_finished":0,"c":0,"l":2386922,"d":6514},"events":[{"name":"bootstrap","f":2380693,"d_finished":1180,"c":1,"l":2381873,"d":1180},{"a":2385929,"name":"ack","f":2385929,"d_finished":0,"c":0,"l":2386922,"d":993},{"a":2385920,"name":"processing","f":2381987,"d_finished":2194,"c":2,"l":2385829,"d":3196},{"name":"ProduceResults","f":2381457,"d_finished":1070,"c":4,"l":2386188,"d":1070},{"a":2386193,"name":"Finish","f":2386193,"d_finished":0,"c":0,"l":2386922,"d":729},{"name":"task_result","f":2382001,"d_finished":2163,"c":2,"l":2385827,"d":2163}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:50.368978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:50.359833Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:36:50.369022Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:50.369152Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2026-01-07T17:36:01.928377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:01.950227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:01.950444Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:01.956001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:01.956201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:01.956376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:01.956457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:01.956547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:01.956616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:01.956700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:01.956764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:01.956833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:01.956965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.957041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:01.957101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:01.957171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:01.979528Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-07T17:36:01.980050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:01.980089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:01.980215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.980345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:01.980396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:01.980427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:01.980495Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:01.980550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:01.980581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:01.980599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:01.980722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.980777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:01.980803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:01.980820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:01.980893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:01.980960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:01.980996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:01.981018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:01.981048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:01.981072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:01.981093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:01.981127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:01.981152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:01.981174Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:01.981312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:01.981379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:01.981396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:01.981497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:01.981525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.981545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.981579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:01.981610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:01.981634Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:01.981670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:01.981693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:01.981711Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:01.981786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:01.981823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:36:50.063001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1259; 2026-01-07T17:36:50.063085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=70845; 2026-01-07T17:36:50.063138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=70996; 2026-01-07T17:36:50.063212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:36:50.063711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=424; 2026-01-07T17:36:50.063767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=72136; 2026-01-07T17:36:50.063943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2026-01-07T17:36:50.064097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2026-01-07T17:36:50.064572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=415; 2026-01-07T17:36:50.065005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=374; 2026-01-07T17:36:50.084053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18934; 2026-01-07T17:36:50.101849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17641; 2026-01-07T17:36:50.101973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-07T17:36:50.102032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:36:50.102077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:36:50.102164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=47; 2026-01-07T17:36:50.102213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:36:50.102307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2026-01-07T17:36:50.102372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:36:50.102459Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2026-01-07T17:36:50.102550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2026-01-07T17:36:50.102641Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-07T17:36:50.102680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=121343; 2026-01-07T17:36:50.102824Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 
2026-01-07T17:36:50.102931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:50.102989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:50.103065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:50.103124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:50.103326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:50.103385Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:50.103424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:50.103475Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:50.103540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807066006;tx_id=18446744073709551615;;current_snapshot_ts=1767807363463; 2026-01-07T17:36:50.103582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:50.103626Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:50.103662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:50.103759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:50.103956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.037000s; 2026-01-07T17:36:50.107166Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:50.107453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:50.107513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:50.107596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:50.107646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:50.107712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807066006;tx_id=18446744073709551615;;current_snapshot_ts=1767807363463; 2026-01-07T17:36:50.107761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:50.107820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:50.107871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:50.107956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:36:50.108008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:50.108552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.043000s; 2026-01-07T17:36:50.108599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |86.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2026-01-07T17:36:50.788044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:50.826693Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:50.826932Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:50.834350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:50.834593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:50.834815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:50.834939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:50.835055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:50.835148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:50.835240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:50.835355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:50.835459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:50.835572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:50.835709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:50.835826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:50.835937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:50.865377Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:50.865956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:50.866011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:50.866193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:50.866370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:50.866438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:50.866478Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:50.866578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:50.866654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:50.866694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:50.866721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:50.866907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:50.866970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:50.867011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:50.867040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:50.867126Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:50.867180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:50.867247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:50.867276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:50.867333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:50.867373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:50.867410Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:50.867453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:50.867489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:50.867546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:50.867765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:50.867884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:50.867920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:50.868070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:50.868123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:50.868152Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:50.868202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:50.868248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:50.868287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:50.868348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:50.868386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:50.868419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:50.868540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:50.868596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:51.913897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=10;finished=1; 2026-01-07T17:36:51.913947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:51.913997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:51.914266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:51.914445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.914505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:51.914663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-07T17:36:51.914727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=message; 2026-01-07T17:36:51.915049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2026-01-07T17:36:51.915246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.915395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.915551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-07T17:36:51.915742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:51.915909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.916076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.916353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:372:2384] finished for tablet 9437184 2026-01-07T17:36:51.916885Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:371:2383];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1963770,"name":"_full_task","f":1963770,"d_finished":0,"c":0,"l":1973885,"d":10115},"events":[{"name":"bootstrap","f":1964046,"d_finished":1321,"c":1,"l":1965367,"d":1321},{"a":1973169,"name":"ack","f":1971699,"d_finished":1348,"c":1,"l":1973047,"d":2064},{"a":1973156,"name":"processing","f":1965526,"d_finished":3884,"c":3,"l":1973050,"d":4613},{"name":"ProduceResults","f":1964893,"d_finished":2454,"c":6,"l":1973571,"d":2454},{"a":1973578,"name":"Finish","f":1973578,"d_finished":0,"c":0,"l":1973885,"d":307},{"name":"task_result","f":1965544,"d_finished":2473,"c":2,"l":1971482,"d":2473}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.917012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:51.917506Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:371:2383];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1963770,"name":"_full_task","f":1963770,"d_finished":0,"c":0,"l":1974515,"d":10745},"events":[{"name":"bootstrap","f":1964046,"d_finished":1321,"c":1,"l":1965367,"d":1321},{"a":1973169,"name":"ack","f":1971699,"d_finished":1348,"c":1,"l":1973047,"d":2694},{"a":1973156,"name":"processing","f":1965526,"d_finished":3884,"c":3,"l":1973050,"d":5243},{"name":"ProduceResults","f":1964893,"d_finished":2454,"c":6,"l":1973571,"d":2454},{"a":1973578,"name":"Finish","f":1973578,"d_finished":0,"c":0,"l":1974515,"d":937},{"name":"task_result","f":1965544,"d_finished":2473,"c":2,"l":1971482,"d":2473}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:51.917615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:51.904236Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:36:51.917657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:51.917824Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2026-01-07T17:36:12.797928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:12.821017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:12.821230Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:12.827554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:12.827739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:12.827902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:12.827975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:12.828057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:12.828128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:12.828214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:12.828275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:12.828345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:12.828420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.828508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:12.828574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:12.828628Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:12.850937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:12.851455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:12.851528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:12.851713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:12.851888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:12.851963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:12.852010Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:12.852127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:12.852203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:12.852247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:12.852281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:12.852431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:12.852523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:12.852587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:12.852611Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:12.852671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:12.852710Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:12.852750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:12.852768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:12.852799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:12.852824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:12.852844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:12.852893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:12.852946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:12.852968Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:12.853148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:12.853265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:12.853289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:12.853416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:12.853456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.853483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.853528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:12.853568Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:12.853605Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:12.853662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:12.853717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:12.853751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:12.853876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:12.853935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:36:51.691679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=966; 2026-01-07T17:36:51.691735Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=80700; 2026-01-07T17:36:51.691785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=80831; 2026-01-07T17:36:51.691861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-07T17:36:51.692232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=315; 2026-01-07T17:36:51.692278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=81876; 2026-01-07T17:36:51.692460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2026-01-07T17:36:51.692599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=79; 2026-01-07T17:36:51.693125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=463; 2026-01-07T17:36:51.693594Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=400; 2026-01-07T17:36:51.712960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19188; 2026-01-07T17:36:51.730430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17324; 2026-01-07T17:36:51.730655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=59; 2026-01-07T17:36:51.730777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=16; 2026-01-07T17:36:51.730820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-07T17:36:51.730900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-07T17:36:51.731074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2026-01-07T17:36:51.731190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2026-01-07T17:36:51.731245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:36:51.731320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2026-01-07T17:36:51.731423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=65; 2026-01-07T17:36:51.731499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2026-01-07T17:36:51.731539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=131727; 2026-01-07T17:36:51.731688Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:51.731801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:51.731862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:51.731930Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:51.731987Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:51.732184Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:51.732249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:51.732290Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:51.732336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:51.732405Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807075247;tx_id=18446744073709551615;;current_snapshot_ts=1767807374271; 2026-01-07T17:36:51.732449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:51.732494Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:51.732531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:51.732617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:51.732814Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.079000s; 2026-01-07T17:36:51.739208Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:51.739945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:51.740020Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:51.740120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:51.740174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:51.740249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807075247;tx_id=18446744073709551615;;current_snapshot_ts=1767807374271; 2026-01-07T17:36:51.740304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:51.740358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:51.740408Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:51.740492Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:36:51.740546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:51.741874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.148000s; 2026-01-07T17:36:51.741932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> KqpTx::LocksAbortOnCommit >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> TTransferTests::Create >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> Cdc::UpdateRetentionPeriod [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> Cdc::SupportedTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-07T17:36:51.759278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:51.787355Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:51.787600Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:51.795173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:51.795432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:51.795676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:51.795800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:51.795906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:51.796004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:51.796109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:51.796241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:51.796354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:51.796464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.796555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:51.796672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:51.796807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:51.824965Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:51.825604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:51.825662Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:51.825881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:51.826106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:51.826199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:51.826255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:51.826375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:51.826466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:51.826513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:51.826542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:51.826706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:51.826773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:51.826815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:51.826844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:51.826965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:51.827057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:51.827112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:51.827139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:51.827195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:51.827310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:51.827352Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:51.827402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:51.827448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:51.827490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:51.827724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:51.827788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:51.827818Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:51.827967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:51.828025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.828053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.828099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:51.828154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:51.828202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:51.828265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:51.828302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:51.828331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:51.828510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:51.828560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... 
jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:36:56.869659Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-07T17:36:56.869697Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:36:56.869764Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:36:56.870232Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:56.870400Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.870437Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:36:56.870562Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-07T17:36:56.870631Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-07T17:36:56.870881Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-07T17:36:56.871017Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.871144Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.871270Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.871560Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:36:56.871686Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.871806Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.872081Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-07T17:36:56.872498Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":5632946,"name":"_full_task","f":5632946,"d_finished":0,"c":0,"l":5643451,"d":10505},"events":[{"name":"bootstrap","f":5633238,"d_finished":1162,"c":1,"l":5634400,"d":1162},{"a":5642858,"name":"ack","f":5641517,"d_finished":1123,"c":1,"l":5642640,"d":1716},{"a":5642846,"name":"processing","f":5634574,"d_finished":3006,"c":3,"l":5642643,"d":3611},{"name":"ProduceResults","f":5634015,"d_finished":1996,"c":6,"l":5643152,"d":1996},{"a":5643157,"name":"Finish","f":5643157,"d_finished":0,"c":0,"l":5643451,"d":294},{"name":"task_result","f":5634587,"d_finished":1822,"c":2,"l":5641112,"d":1822}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.872566Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:36:56.873079Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":5632946,"name":"_full_task","f":5632946,"d_finished":0,"c":0,"l":5643919,"d":10973},"events":[{"name":"bootstrap","f":5633238,"d_finished":1162,"c":1,"l":5634400,"d":1162},{"a":5642858,"name":"ack","f":5641517,"d_finished":1123,"c":1,"l":5642640,"d":2184},{"a":5642846,"name":"processing","f":5634574,"d_finished":3006,"c":3,"l":5642643,"d":4079},{"name":"ProduceResults","f":5634015,"d_finished":1996,"c":6,"l":5643152,"d":1996},{"a":5643157,"name":"Finish","f":5643157,"d_finished":0,"c":0,"l":5643919,"d":762},{"name":"task_result","f":5634587,"d_finished":1822,"c":2,"l":5641112,"d":1822}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:36:56.873166Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:36:56.859253Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-07T17:36:56.873212Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:36:56.873349Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] Test command err: 2026-01-07T17:36:48.823396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:48.856085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:48.856398Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:48.864182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:48.864437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:48.864676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:48.864774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:48.864875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:48.865006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:48.865121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:48.865245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:48.865337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:48.865452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:48.865570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:48.865677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:48.865789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:48.898513Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-07T17:36:48.901603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:48.901688Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:48.901913Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:48.902165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:48.902250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:48.902298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:48.902430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:48.902520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:48.902567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:48.902601Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:48.902784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:48.902852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:48.902915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:48.902948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:48.903039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:48.903106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:48.903164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:48.903208Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:48.903265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:48.903303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:48.903332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:48.903376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:48.903419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:48.903473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:48.903705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:48.903833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:48.903866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:48.903987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:48.904029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:48.904058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:48.904110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:48.904151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:48.904189Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:48.904259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:48.904308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:48.904345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:48.904553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:48.904612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:36:54.949976Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:54.981173Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=7385e814-ebef11f0-b5541061-25a89eaa; 2026-01-07T17:36:54.981469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-07T17:36:54.981531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-07T17:36:54.981584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-07T17:36:54.982099Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-07T17:36:54.982232Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-07T17:36:54.996410Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-07T17:36:54.996605Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:54.998347Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=7411d75c-ebef11f0-b5ed5682-9d717ab2; 2026-01-07T17:36:54.998580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-07T17:36:54.998631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-07T17:36:54.998684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-07T17:36:54.999151Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-07T17:36:54.999252Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-07T17:36:55.013624Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-07T17:36:55.013847Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:55.015602Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=7493744c-ebef11f0-9cc9cad3-49dc9b20; 2026-01-07T17:36:55.015839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-07T17:36:55.015889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-07T17:36:55.015940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-07T17:36:55.016436Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-07T17:36:55.016536Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-07T17:36:55.029881Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-07T17:36:55.030083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:55.054855Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-07T17:36:55.054942Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=758fb1f8-ebef11f0-9f522388-f56d4afc;in_flight=1;size_in_flight=6330728; 2026-01-07T17:36:56.080902Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 
1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-07T17:36:56.149737Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=758fb1f8-ebef11f0-9f522388-f56d4afc; 2026-01-07T17:36:56.150078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-07T17:36:56.150154Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-07T17:36:56.150227Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-07T17:36:56.150937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-07T17:36:56.151073Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-07T17:36:56.174776Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-07T17:36:56.175028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2026-01-07T17:36:48.975693Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:49.012985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:49.013447Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:49.022701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:49.023006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:49.023288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:49.023460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:49.023627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:49.023786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:49.023967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:49.024134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:49.024291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:49.024466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.024642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:49.024810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:49.024975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:49.067317Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:49.067937Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:49.067995Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:49.068194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.068361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:49.068436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:49.068474Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:49.068570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:49.068635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:49.068674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:49.068721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:49.068905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.068985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:49.069036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:49.069065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:49.069158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:49.069217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:49.069271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:49.069306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:49.069368Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:49.069407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:49.069439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:49.069479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:49.069519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:49.069551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:49.069792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:49.069900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:49.069933Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:49.070058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:49.070095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.070124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.070172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:49.070213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:49.070250Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:49.070301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:49.070361Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:49.070393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:49.070576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:49.070635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-07T17:36:54.932093Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:54.968257Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=738e059e-ebef11f0-9a509fe9-453b9e88; 2026-01-07T17:36:54.968549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-07T17:36:54.968604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-07T17:36:54.968658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-07T17:36:54.969183Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-07T17:36:54.969293Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-07T17:36:54.985489Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 
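The TX_COLUMNSHARD entries above are semicolon-delimited key=value records, so the normalizer chain they trace (normalizer_register -> normalizer_init -> normalizer_finished -> normalizer_switched) can be followed mechanically. A minimal sketch for doing that, assuming Python 3 and working only from the entry format visible above; ENTRY_RE, parse_fields and normalizer_events are hypothetical helper names, not part of YDB or ya:

import re

# One log entry: "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <rest>",
# where <rest> is a run of "key=value;" fields (format assumed from the lines above).
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
    r":(?P<component>\S+) (?P<level>\w+): (?P<rest>.*?)"
    r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}|\Z)",
    re.S,
)

def parse_fields(rest: str) -> dict:
    """Split the 'key=value;' tail of an entry into a dict (split at the first '=')."""
    fields = {}
    for part in rest.split(";"):
        key, sep, value = part.partition("=")
        if sep:
            fields[key.strip()] = value.strip()
    return fields

def normalizer_events(log_text: str):
    """Yield (timestamp, event, description-or-type) for normalizer-related entries."""
    for m in ENTRY_RE.finditer(log_text):
        fields = parse_fields(m.group("rest"))
        event = fields.get("event", "")
        if event.startswith("normalizer"):
            yield m.group("ts"), event, fields.get("description") or fields.get("type", "")

Run over the fragment above, this would yield the registration and execution order Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> ... exactly as the log reports it.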
2026-01-07T17:36:54.985671Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:54.987405Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=740f67ec-ebef11f0-ad938a39-f56104fa; 2026-01-07T17:36:54.987621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-07T17:36:54.987673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-07T17:36:54.987724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-07T17:36:54.988163Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-07T17:36:54.988261Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-07T17:36:55.002235Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-07T17:36:55.002419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:55.004216Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=749770ec-ebef11f0-ac7eba09-9bcae529; 2026-01-07T17:36:55.004444Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-07T17:36:55.004496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-07T17:36:55.004548Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-07T17:36:55.006355Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-07T17:36:55.006474Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-07T17:36:55.025533Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-07T17:36:55.025734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:55.068367Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-07T17:36:55.068468Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=7590a0cc-ebef11f0-93d6a4c2-698ffa6f;in_flight=1;size_in_flight=6330728; 2026-01-07T17:36:56.157610Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-07T17:36:56.228144Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=7590a0cc-ebef11f0-93d6a4c2-698ffa6f; 2026-01-07T17:36:56.228428Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-07T17:36:56.228506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-07T17:36:56.228591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-07T17:36:56.229235Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-07T17:36:56.229362Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-07T17:36:56.244688Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-07T17:36:56.244891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> TTransferTests::Create_Disabled |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Secret::ValidationQueryService [GOOD] >> Cdc::Drop[PqRunner] [GOOD] >> TTransferTests::Create [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader >> TTransferTests::CreateSequential >> Cdc::Drop[YdsRunner] |86.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2026-01-07T17:36:02.764246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.795111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.795329Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:02.802175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.802404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.802612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.802741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.802866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.802973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.803067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.803177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.803271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.803409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.803517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.803627Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.803741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.832984Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.833581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.833640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.833820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.833966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.834031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.834071Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.834183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.834247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.834299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.834334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.834501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.834557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.834596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.834644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.834727Z 
node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.834785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.834832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:02.834859Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.834902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.834943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.834977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:02.835014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.835059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.835090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.835300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.835411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.835446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.835553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.835591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.835617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.835659Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.835697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.835733Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:02.835792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.835831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.835860Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.835982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:02.836039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-07T17:36:56.050263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1124; 2026-01-07T17:36:56.050358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=86015; 2026-01-07T17:36:56.050415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=86150; 2026-01-07T17:36:56.050499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=17; 2026-01-07T17:36:56.050978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=419; 2026-01-07T17:36:56.051031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=87227; 2026-01-07T17:36:56.051252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=155; 2026-01-07T17:36:56.051412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2026-01-07T17:36:56.051874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=394; 2026-01-07T17:36:56.052292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=361; 2026-01-07T17:36:56.085135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=32742; 2026-01-07T17:36:56.103637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18364; 2026-01-07T17:36:56.103768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-07T17:36:56.103833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:36:56.103886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-07T17:36:56.103968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-07T17:36:56.104016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:36:56.104111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2026-01-07T17:36:56.104161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2026-01-07T17:36:56.104276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2026-01-07T17:36:56.104382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=64; 2026-01-07T17:36:56.104477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=58; 2026-01-07T17:36:56.104520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=156289; 
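The composite_init entries above also carry a per-stage timing breakdown (PRECHARGE:...LoadingTime / EXECUTE:...LoadingTime). A small sketch, again assuming Python 3, that aggregates those values and surfaces the slowest stages; loading_time_breakdown is a hypothetical helper, and the values are summed in whatever units the log reports:

import re
from collections import defaultdict

# Matches e.g. "EXECUTE:granulesLoadingTime=77567" or "PRECHARGE:finishLoadingTime=17".
TIMING_RE = re.compile(r"(PRECHARGE|EXECUTE):(\w+?)LoadingTime=(\d+)")

def loading_time_breakdown(log_text: str, top: int = 5):
    """Sum the reported per-stage loading times and return the `top` largest."""
    totals = defaultdict(int)
    for phase, stage, value in TIMING_RE.findall(log_text):
        totals[(phase, stage)] += int(value)
    return sorted(totals.items(), key=lambda kv: kv[1], reverse=True)[:top]

On the fragment above this would rank EXECUTE:composite_init (156289) and EXECUTE:column_engines (87227) first, matching the stages that dominate tablet init in this run.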
2026-01-07T17:36:56.104679Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:56.104806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:56.104871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:56.105002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:56.105058Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:56.105280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:56.105353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:56.105397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:56.105463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:56.105535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807066894;tx_id=18446744073709551615;;current_snapshot_ts=1767807364294; 2026-01-07T17:36:56.105583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:56.105635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.105679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.105797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:56.106010Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.133000s; 2026-01-07T17:36:56.109376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:56.109644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:56.109722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:56.109804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:56.109862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:56.109945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807066894;tx_id=18446744073709551615;;current_snapshot_ts=1767807364294; 2026-01-07T17:36:56.109997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:56.110054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.110101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.110189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:36:56.110253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:56.110765Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.089000s; 2026-01-07T17:36:56.110823Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2026-01-07T17:36:06.409957Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:06.441549Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:06.441769Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:06.449195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:06.449430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:06.449692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:06.449830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:06.449947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:06.450055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:06.450170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:06.450301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:06.450432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:06.450562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:06.450678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:06.450799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:06.450909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:06.480670Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:06.481384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:06.481450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:06.481666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:06.481834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:06.481910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:06.481957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:06.482122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:06.482204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:06.482256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:06.482290Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:06.482508Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:06.482581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:06.482632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:06.482663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:06.482759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:06.482824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:06.482888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:06.482918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:06.482978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:06.483038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:06.483075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:06.483122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:06.483185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:06.483218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:06.483446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:06.483556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:06.483590Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:06.483742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:06.483791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:06.483820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:06.483866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:06.483917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:06.483960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:06.484016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:06.484055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:06.484088Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:06.484311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:06.484391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:36:57.353488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1223; 2026-01-07T17:36:57.353563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=77427; 2026-01-07T17:36:57.353619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=77567; 2026-01-07T17:36:57.353693Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-07T17:36:57.354234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=466; 2026-01-07T17:36:57.354292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=78695; 2026-01-07T17:36:57.354470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=114; 2026-01-07T17:36:57.354612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=81; 2026-01-07T17:36:57.355052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=383; 2026-01-07T17:36:57.355473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=357; 2026-01-07T17:36:57.372593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17031; 2026-01-07T17:36:57.388099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15374; 2026-01-07T17:36:57.388214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-07T17:36:57.388272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:36:57.388319Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2026-01-07T17:36:57.388399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2026-01-07T17:36:57.388444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:36:57.388539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-07T17:36:57.388584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:36:57.388656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-07T17:36:57.388751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-07T17:36:57.388838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-07T17:36:57.388882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=123052; 2026-01-07T17:36:57.389052Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:57.389172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:36:57.389231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:36:57.389300Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:36:57.389348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:36:57.389596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:57.389673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:36:57.389732Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:57.389784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:57.389856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807070481;tx_id=18446744073709551615;;current_snapshot_ts=1767807367938; 2026-01-07T17:36:57.389905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:57.389953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:57.389992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:57.390091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:57.390297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.009000s; 2026-01-07T17:36:57.393486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:57.393969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:36:57.394033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:57.394127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:36:57.394179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:36:57.394255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807070481;tx_id=18446744073709551615;;current_snapshot_ts=1767807367938; 2026-01-07T17:36:57.394304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:57.394354Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:57.394400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:57.394484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:36:57.394538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:57.395095Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.167000s; 2026-01-07T17:36:57.395145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:34:03.892485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:04.034648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:34:04.053705Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:34:04.054411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:34:04.054495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:34:04.432147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:04.432330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:04.531598Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807241299810 != 1767807241299814 2026-01-07T17:34:04.552281Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:04.623341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:04.764018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:34:05.537094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:05.538377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:05.538433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:05.538479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:05.538687Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:05.552777Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:34:17.491990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:17.492206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:17.492848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:17.510823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:17.510974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:17.520282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:17.547803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2826], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:34:17.673448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2860] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:17.816184Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1027:2868], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2026-01-07T17:34:17.841214Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWJhY2MxMzYtMjBlMDZlNTItMjNhMTBlNDctZTMxMDI0MGU=, ActorId: [1:949:2816], ActorState: ExecuteState, LegacyTraceId: 01kecrc2tp1yjzedn6s92vwx7y, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 column: 20 } message: "mismatched input \'-\' expecting \'(\'" end_position { row: 1 column: 20 } severity: 1 } tx_id# trace_id# REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-07T17:34:28.468450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:29.485596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:30.269749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:31.192439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:32.322709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:32.939871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:34:33.546948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:34.365170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-07T17:34:34.991441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:34:34.991515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:36.324247Z node 1 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTU4MTFmNDMtN2U5YjQ1OTctYTY2OGIzYzgtZDRjZWQwYjg=, ActorId: [1:1050:2883], ActorState: ExecuteState, LegacyTraceId: 01kecrcd7ya45f552n1xerx3tq, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 not found for alter" severity: 1 } } trace_id# REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:35:11.585980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB ... lled at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:35:14.452226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715731:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2026-01-07T17:35:26.912397Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTNlZDg4OWEtOWY1YmU4NjQtOWRhMzA0NDUtMzRkZGFkZmY=, ActorId: [1:3289:4623], ActorState: ExecuteState, LegacyTraceId: 01kecre64265pnm97kmf40hfdv, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET_ACCESS\"" severity: 1 issues { message: "preparation problem: used in access secret secret2 not found" severity: 1 } } trace_id# REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Executing operation with object "SECRET_ACCESS"
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:35:39.703649Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=15; 2026-01-07T17:35:39.703902Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:35:39.704086Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:35:39.704433Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3649:4837], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3561:4837]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3649:4837].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:35:39.705026Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3643:4837], SessionActorId: [1:3561:4837], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3561:4837]. 2026-01-07T17:35:39.706864Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTE0NzU0ZGMtNmRiNjAwZjgtZTgyY2JhYjQtNGI3NDI5NDA=, ActorId: [1:3561:4837], ActorState: ExecuteState, LegacyTraceId: 01kecrek1dd8cbs2y702y6yche, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3644:4837] from: [1:3643:4837] trace_id# 2026-01-07T17:35:39.707212Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3644:4837] TxId: 281474976715757. Ctx: { TraceId: 01kecrek1dd8cbs2y702y6yche, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTE0NzU0ZGMtNmRiNjAwZjgtZTgyY2JhYjQtNGI3NDI5NDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:35:39.707713Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTE0NzU0ZGMtNmRiNjAwZjgtZTgyY2JhYjQtNGI3NDI5NDA=, ActorId: [1:3561:4837], ActorState: ExecuteState, LegacyTraceId: 01kecrek1dd8cbs2y702y6yche, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-07T17:35:39.719044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kecrejk5axr7t1bac2h4xfmf" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OTE0NzU0ZGMtNmRiNjAwZjgtZTgyY2JhYjQtNGI3NDI5NDA=" tx_control { tx_id: "01kecrejk5axr7t1bac2h4xfmf" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-07T17:35:52.647897Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTIyNmJkMjEtZWM5YjE2ZGQtMTc2NTEzNzEtODMxODVhNTA=, ActorId: [1:3827:5044], ActorState: ExecuteState, LegacyTraceId: 01kecrez5b55a5580hmjvv2wmr, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 using in access for test@test1" severity: 1 } } trace_id# REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-07T17:36:04.182224Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4117:5276], for# root@builtin, access# DescribeSchema 2026-01-07T17:36:04.182332Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4117:5276], for# root@builtin, access# DescribeSchema 2026-01-07T17:36:04.184008Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4114:5273], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:36:04.186320Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTA0ODhhNzUtN2M3OGUxZGUtODcwNTViZGYtYzJlZGRjMTE=, ActorId: [1:4110:5270], ActorState: ExecuteState, LegacyTraceId: 01kecrfb26e4dkkd5wjcm0fjfd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:36:15.879389Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator 2026-01-07T17:36:16.787053Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjE3NzA4ZDItMTA5OWI2MmYtYWM1N2MyMzAtZDE5OWQ1MmY=, ActorId: [1:4344:5456], ActorState: ExecuteState, LegacyTraceId: 01kecrfpfqc07vyf1bkb4dg4qe, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot CREATE objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:36:29.443004Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OWE2M2NhNS1iZTlhZmY3MC0zMmNkNzUxYy1hZjAzOWFjMA==, ActorId: [1:4697:5738], ActorState: ExecuteState, LegacyTraceId: 01kecrg2x0f78ezm50y1wyhvyz, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot UPSERT objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> KqpSnapshotIsolation::TConflictReadWriteOltp >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |86.7%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> TBoardSubscriber2DCTest::NotAvailableByShutdown >> BasicUsage::CreateTopicWithStreamingConsumer >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> TxUsage::WriteToTopic_Demo_11_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:32:36.761296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:36.849036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:32:36.863434Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:32:36.864049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:32:36.864094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:32:37.201763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:37.202043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:37.314994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807153366171 != 1767807153366175 2026-01-07T17:32:37.328469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:37.379695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:37.473681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:37.670389Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:32:37.957442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:38.115826Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:32:38.115892Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:32:38.116015Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-07T17:32:38.315753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:32:38.315871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:32:38.316538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:32:38.316632Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:32:38.328220Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:32:38.328421Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:32:38.328673Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:32:38.329500Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:32:38.331221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:38.332454Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:32:38.332556Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:32:38.378694Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:32:38.398376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:32:38.398760Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:889:2769] 2026-01-07T17:32:38.399124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:32:38.457504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:889:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:32:38.458259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:32:38.458384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:32:38.460148Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:32:38.460250Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:32:38.460315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:32:38.460699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:32:38.460918Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:32:38.461047Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-07T17:32:38.473466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:32:38.522048Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:32:38.522204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:32:38.522292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-07T17:32:38.522330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:32:38.522360Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:32:38.522395Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:32:38.522598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:889:2769], Recipient [1:889:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:32:38.522640Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:32:38.522851Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:32:38.522952Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:32:38.523036Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:32:38.523106Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:32:38.523160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:32:38.523208Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:32:38.523253Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:32:38.523283Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:32:38.523348Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:32:38.523810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2773], Recipient [1:889:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:32:38.523856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:32:38.523896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:888:2768], serverId# [1:896:2773], sessionId# [0:0:0] 2026-01-07T17:32:38.523958Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:896:2773] 
2026-01-07T17:32:38.523995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:32:38.524094Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:32:38.524346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:32:38.524426Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:32:38.524509Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transacti ... 37: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:37:02.059890Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:37:02.060016Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-07T17:37:02.060297Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-07T17:37:02.060360Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[29:1292:3056], 1} after executionsCount# 1 2026-01-07T17:37:02.060413Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[29:1292:3056], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:37:02.060504Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[29:1292:3056], 1} finished in read 2026-01-07T17:37:02.060568Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-07T17:37:02.060602Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:37:02.060635Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:37:02.060668Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:37:02.060730Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-07T17:37:02.060760Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:37:02.060789Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037889 has finished 2026-01-07T17:37:02.060827Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:37:02.061024Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, 
NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:37:02.061097Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:37:02.061143Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:37:02.061701Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037889] send [29:1146:2944] 2026-01-07T17:37:02.061750Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037889] push event to server [29:1146:2944] 2026-01-07T17:37:02.061891Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1292:3056], Recipient [29:928:2787]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:37:02.062479Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } 2026-01-07T17:37:02.062799Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037890] ::Bootstrap [29:1294:3059] 2026-01-07T17:37:02.062911Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037890] lookup [29:1294:3059] 2026-01-07T17:37:02.063107Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037890] queue send [29:1294:3059] 2026-01-07T17:37:02.063222Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037890] forward result local node, try to connect [29:1294:3059] 2026-01-07T17:37:02.063277Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037890]::SendEvent [29:1294:3059] 2026-01-07T17:37:02.063479Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [29:1295:3060], Recipient [29:1249:3028]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:37:02.063526Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:37:02.063572Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [29:1294:3059], serverId# [29:1295:3060], sessionId# [0:0:0] 2026-01-07T17:37:02.063626Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037890] connected with status OK role: Leader [29:1294:3059] 2026-01-07T17:37:02.063673Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037890] send queued [29:1294:3059] 2026-01-07T17:37:02.063708Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1294:3059] 2026-01-07T17:37:02.063924Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [29:1292:3056], Recipient [29:1249:3028]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-07T17:37:02.064035Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-07T17:37:02.064099Z node 29 :TABLET_EXECUTOR DEBUG: 
Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:37:02.064190Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-07T17:37:02.064274Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-07T17:37:02.064360Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T17:37:02.064395Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-07T17:37:02.064432Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-07T17:37:02.064472Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-07T17:37:02.064537Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-07T17:37:02.064586Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T17:37:02.064621Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-07T17:37:02.064649Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-07T17:37:02.064682Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-07T17:37:02.064806Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-07T17:37:02.065090Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-07T17:37:02.065148Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[29:1292:3056], 2} after executionsCount# 1 2026-01-07T17:37:02.065199Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[29:1292:3056], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:37:02.065280Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[29:1292:3056], 2} finished in read 2026-01-07T17:37:02.065341Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T17:37:02.065372Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-01-07T17:37:02.065400Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T17:37:02.065431Z node 29 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-07T17:37:02.065485Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T17:37:02.065516Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T17:37:02.065546Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-07T17:37:02.065582Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-07T17:37:02.065702Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:37:02.065765Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:37:02.065810Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-07T17:37:02.067865Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037890] send [29:1294:3059] 2026-01-07T17:37:02.067926Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1294:3059] 2026-01-07T17:37:02.068104Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1292:3056], Recipient [29:1249:3028]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-07T17:37:02.068160Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> TxUsage::WriteToTopic_Demo_41_Table |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |86.8%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::RollbackByIdle >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel |86.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |86.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> BasicUsage::CreateTopicWithCustomName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 8686, MsgBus: 19787 2026-01-07T17:34:19.569237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674366476676301:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:19.569292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:20.089341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:20.089456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:20.144880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:20.164433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:20.170302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:20.178749Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674366476676086:2081] 1767807259535845 != 1767807259535848 2026-01-07T17:34:20.371109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:20.371143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:20.371155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:20.371253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-07T17:34:20.406328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:20.533970Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:21.157087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:21.164178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:34:21.241100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:21.757438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:22.361905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:22.725784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.573027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674366476676301:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:24.573106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:24.807459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674387951514459:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.807597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.808186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674387951514469:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.808256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.206467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.262207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.314059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.373404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.439249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.549478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.652395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.744409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.846768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674392246482661:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.846896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.847361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674392246482667:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.847688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674392246482666:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.847722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.853790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:25.872132Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674392246482670:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:34:25.979902Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674392246482721:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:28.432149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... iptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:05.463426Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:05.483907Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592674821450561378:2081] 1767807365339027 != 1767807365339030 2026-01-07T17:36:05.488857Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:05.488991Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:05.489203Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:05.515245Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:05.575346Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:36:05.575378Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:36:05.575387Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:36:05.575473Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:36:05.707620Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:06.087070Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:06.104503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:06.171089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:06.346741Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:06.356633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:06.442735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:08.846980Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674834335465140:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:08.847080Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:08.847407Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674834335465150:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:08.847490Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:08.922015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:08.953865Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:08.990923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.028652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.063533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.100892Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.138639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.191664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.316614Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674838630433317:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.316682Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.316801Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674838630433322:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.316832Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592674838630433324:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.316864Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.320589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:09.332345Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592674838630433326:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:36:09.435046Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592674838630433379:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:10.339893Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592674821450561405:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:10.339974Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:36:11.501046Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:20.436016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:36:20.436046Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> TxUsage::WriteToTopic_Demo_12_Table >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> TTransferTests::Alter [GOOD] >> Cdc::SupportedTypes [GOOD] >> Cdc::StringEscaping >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2026-01-07T17:36:12.405129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:12.425010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:12.425208Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:12.431053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:12.431225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:12.431383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:12.431462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:12.431548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:12.431615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:12.431693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:12.431773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:12.431827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:12.431916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.432000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:12.432104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:12.432198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:12.453563Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:12.454184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:12.454241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:12.454434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:12.454614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:12.454690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:12.454729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:12.454820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:12.454871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:12.454907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:12.454936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:12.455074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:12.455116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:12.455144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:12.455167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:12.455225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:12.455280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:12.455342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:12.455363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:12.455393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:12.455418Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:12.455442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:12.455477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:12.455506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:12.455524Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:12.455681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:12.455765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:12.455783Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:12.455861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:12.455884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.455899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:12.455925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:12.455950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:12.456008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:12.456061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:12.456105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:12.456133Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:12.456255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:12.456304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:37:09.147998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1081; 2026-01-07T17:37:09.148059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=69807; 2026-01-07T17:37:09.148114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=69935; 2026-01-07T17:37:09.148184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-07T17:37:09.148545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=310; 2026-01-07T17:37:09.148595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=70856; 2026-01-07T17:37:09.148807Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=149; 2026-01-07T17:37:09.148962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=97; 2026-01-07T17:37:09.149371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=354; 2026-01-07T17:37:09.149747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=315; 2026-01-07T17:37:09.166000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16178; 2026-01-07T17:37:09.182273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16152; 2026-01-07T17:37:09.182402Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2026-01-07T17:37:09.182473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=17; 2026-01-07T17:37:09.182517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-07T17:37:09.182595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2026-01-07T17:37:09.182643Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:37:09.182728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2026-01-07T17:37:09.182775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:09.182847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-07T17:37:09.182941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2026-01-07T17:37:09.183031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=53; 2026-01-07T17:37:09.183072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=114634; 2026-01-07T17:37:09.183214Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:09.183332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:09.183394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:09.183465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:09.183515Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:09.183743Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:09.183817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:09.183859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:09.183908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:09.183978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807076482;tx_id=18446744073709551615;;current_snapshot_ts=1767807373938; 2026-01-07T17:37:09.184025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:09.184071Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:09.184111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:09.184212Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:09.184414Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.029000s; 2026-01-07T17:37:09.187407Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:09.187875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:09.187938Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:09.188017Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:09.188073Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:09.188144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807076482;tx_id=18446744073709551615;;current_snapshot_ts=1767807373938; 2026-01-07T17:37:09.188189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:09.188243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:09.188286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:09.188367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:37:09.188435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:09.189067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.127000s; 2026-01-07T17:37:09.189114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:36:59.692748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:36:59.692840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:36:59.692895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2026-01-07T17:36:59.695059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:36:59.695140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:36:59.695172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:36:59.695254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:36:59.695350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:36:59.696246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:36:59.696542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:36:59.879237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:36:59.879306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:59.908793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:36:59.909187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:36:59.909369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:36:59.937840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:36:59.938251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:36:59.939033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:36:59.939323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:36:59.942688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:36:59.942899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:36:59.944184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:36:59.944247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:36:59.944399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:36:59.944449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:36:59.944555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:36:59.944773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:00.171274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.172544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.172734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.172841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.172937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:00.173860Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... vProgressOperation 2026-01-07T17:37:14.578869Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:37:14.578924Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2026-01-07T17:37:14.578975Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:14.579049Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2026-01-07T17:37:14.579210Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:14.580135Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:970:2913] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-07T17:37:14.580414Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-07T17:37:14.580473Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2026-01-07T17:37:14.580815Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-07T17:37:14.580971Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-07T17:37:14.581045Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2026-01-07T17:37:14.582926Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:37:14.582985Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-07T17:37:14.583092Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 
State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-07T17:37:14.583956Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269287424, Sender [6:138:2160], Recipient [6:274:2273] 2026-01-07T17:37:14.584020Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5268: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:37:14.584126Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:14.584262Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:14.584321Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-07T17:37:14.584467Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:37:14.584702Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:14.584780Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:37:14.584858Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:706: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2026-01-07T17:37:14.585978Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2026-01-07T17:37:14.587195Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:37:14.587248Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000041 first txId#104 countTxs#1 2026-01-07T17:37:14.587322Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000041 2026-01-07T17:37:14.587367Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-07T17:37:14.587624Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:37:14.587665Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:37:14.587797Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:14.587846Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:37:14.588127Z node 6 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:14.588176Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:37:14.588565Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:37:14.588621Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:37:14.588749Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:14.588803Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:37:14.588846Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:37:14.588897Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:37:14.588958Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:37:14.589005Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:37:14.589057Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:37:14.589109Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:37:14.589149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:37:14.589300Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:37:14.589349Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-07T17:37:14.589393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-07T17:37:14.590068Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [6:210:2211], Recipient [6:128:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Version: 4 } 2026-01-07T17:37:14.590116Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-07T17:37:14.590227Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:37:14.590334Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:37:14.590379Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:37:14.590431Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:37:14.590484Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:37:14.590591Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:37:14.590664Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:14.593086Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:37:14.594127Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:37:14.594192Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.8%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> KqpSnapshotIsolation::TConflictReadWriteOltp [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:01.278954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:01.279048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:37:01.279090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:01.279123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:01.279164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:01.279190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:01.279255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:01.279355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:01.280189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:01.280489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:01.374053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:01.374126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:01.391265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:01.391691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:01.391862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:01.399842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:01.400260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:01.401047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:01.401559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:01.406760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:01.406990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:01.408260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:01.408324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:01.408460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-07T17:37:01.408510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:01.408641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:01.408883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:01.589227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.590971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.591044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.591112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.591187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.591250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:01.591296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 713:2698], Recipient [6:128:2152]: NKikimrTxColumnShard.TEvNotifyTxCompletionResult Origin: 72075186233409546 TxId: 101 2026-01-07T17:37:15.576333Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5291: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2026-01-07T17:37:15.576431Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-07T17:37:15.576483Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:37:15.576647Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-07T17:37:15.576848Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:37:15.579502Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:15.579583Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:37:15.579647Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2026-01-07T17:37:15.579850Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:128:2152], Recipient [6:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:37:15.579899Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:37:15.579973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:15.580036Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:37:15.580199Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:15.580252Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:37:15.580297Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:15.580356Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:37:15.580408Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:15.580461Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:37:15.580567Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:746:2722] message: TxId: 101 2026-01-07T17:37:15.580641Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:15.580699Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:37:15.580749Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:37:15.580974Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:37:15.583581Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:37:15.583733Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [6:746:2722] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2026-01-07T17:37:15.583986Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:37:15.584045Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:747:2723] 2026-01-07T17:37:15.584317Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:749:2725], Recipient [6:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:37:15.584366Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:37:15.584425Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:37:15.585247Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [6:794:2762], Recipient [6:128:2152]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2026-01-07T17:37:15.585316Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:37:15.588726Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:15.589224Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:361: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2026-01-07T17:37:15.589336Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2026-01-07T17:37:15.589631Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:37:15.592491Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:15.592835Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2026-01-07T17:37:15.592916Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:37:15.593323Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:37:15.593380Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:37:15.593829Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [6:800:2768], Recipient [6:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:37:15.593902Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:37:15.593952Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:37:15.594121Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [6:746:2722], Recipient [6:128:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-07T17:37:15.594160Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:37:15.594260Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:37:15.594377Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:37:15.594448Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:798:2766] 2026-01-07T17:37:15.594701Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:800:2768], Recipient [6:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:37:15.594742Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:37:15.594791Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-07T17:37:15.595215Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [6:801:2769], Recipient [6:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:37:15.595287Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:37:15.595416Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:37:15.595637Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 227us result status StatusPathDoesNotExist 2026-01-07T17:37:15.595835Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::MixEnginesOldNew >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2026-01-07T17:36:14.230866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:14.256198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:14.256395Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:14.263002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:14.263236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:14.263463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:14.263549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:14.263634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:14.263703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:14.263758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:14.263818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:14.263892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:14.263970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.264039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:14.264110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:14.264182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:14.284387Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:14.284841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:14.284899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:14.285061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:14.285174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:14.285231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:14.285272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:14.285384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:14.285451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:14.285487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:14.285508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:14.285626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:14.285670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:14.285706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:14.285730Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:14.285786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:14.285827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:14.285867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:14.285887Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:14.285916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:14.285962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:36:14.285988Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:14.286041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:14.286086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:14.286120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:14.286314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:14.286415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:14.286443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:14.286566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:14.286617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.286648Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:14.286697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:14.286740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:14.286778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:14.286831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:14.286867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:14.286896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-07T17:36:14.287010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:14.287071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-07T17:37:13.507286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=689; 2026-01-07T17:37:13.507332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=109594; 2026-01-07T17:37:13.507375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=109725; 2026-01-07T17:37:13.507431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-07T17:37:13.507696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=233; 2026-01-07T17:37:13.507746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=110549; 2026-01-07T17:37:13.507899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2026-01-07T17:37:13.507991Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=52; 2026-01-07T17:37:13.508233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=208; 2026-01-07T17:37:13.508471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=209; 2026-01-07T17:37:13.524391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15845; 2026-01-07T17:37:13.551942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=27406; 2026-01-07T17:37:13.552074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-01-07T17:37:13.552137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:37:13.552179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:13.552272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=55; 2026-01-07T17:37:13.552314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:37:13.552403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-07T17:37:13.552445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:13.552514Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-07T17:37:13.552615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=63; 2026-01-07T17:37:13.552698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=47; 2026-01-07T17:37:13.552743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=167265; 2026-01-07T17:37:13.552894Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:13.553044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:13.553110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:13.553194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:13.553244Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:13.553452Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:13.553524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:13.553568Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:13.553628Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:13.553702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807078365;tx_id=18446744073709551615;;current_snapshot_ts=1767807375765; 2026-01-07T17:37:13.553748Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:13.553795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:13.553830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:13.553925Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:13.554121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.086000s; 2026-01-07T17:37:13.560363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:13.560779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:13.560838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:13.560945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:13.561003Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:13.561080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807078365;tx_id=18446744073709551615;;current_snapshot_ts=1767807375765; 2026-01-07T17:37:13.561127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:13.561191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:13.561235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:13.561323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:37:13.561377Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:13.562107Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.004000s; 2026-01-07T17:37:13.562161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2026-01-07T17:36:30.179060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.206423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.206644Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.215426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.215652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.215903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2026-01-07T17:36:30.216013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.216147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.216267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.216381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.216480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.216583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.216752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.216963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.217105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.217205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.249596Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.250254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.250326Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.250515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.250758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.250842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.250897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.251011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.251092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.251123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.251148Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.251285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.251341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.251369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.251391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.251456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.251502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.251549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.251571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.251609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.251634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.251660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:30.251687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.251713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.251731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.251935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.252020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.252052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.252209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.252250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.252279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.252334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.252367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.252395Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.252441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.252480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.252510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.252659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:30.252714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:37:14.946745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=943; 2026-01-07T17:37:14.946793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=75083; 2026-01-07T17:37:14.946840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=75201; 2026-01-07T17:37:14.946909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-07T17:37:14.947336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=377; 2026-01-07T17:37:14.947382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=76182; 2026-01-07T17:37:14.947545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=105; 2026-01-07T17:37:14.947663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2026-01-07T17:37:14.948097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=389; 2026-01-07T17:37:14.948491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=339; 2026-01-07T17:37:14.964797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16231; 2026-01-07T17:37:14.978962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14039; 2026-01-07T17:37:14.979065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-07T17:37:14.979126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:37:14.979171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:14.979245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2026-01-07T17:37:14.979289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:37:14.979379Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2026-01-07T17:37:14.979420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:14.979482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-07T17:37:14.979565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2026-01-07T17:37:14.979644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-07T17:37:14.979683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=116511; 2026-01-07T17:37:14.979824Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:14.979926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:14.979978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:14.980046Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:14.980089Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:14.980308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:14.980368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:14.980404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:14.980449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:14.980511Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807092578;tx_id=18446744073709551615;;current_snapshot_ts=1767807391643; 2026-01-07T17:37:14.980553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:14.980594Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:14.980631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:14.980717Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:14.980899Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.053000s; 2026-01-07T17:37:14.985004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:14.985359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:14.985415Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:14.985527Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:14.985599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:14.985666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807092578;tx_id=18446744073709551615;;current_snapshot_ts=1767807391643; 2026-01-07T17:37:14.985713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:14.985767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:14.985808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:14.985887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:14.985937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:14.986844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.159000s; 2026-01-07T17:37:14.986890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2026-01-07T17:36:16.990890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:17.031862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:17.032103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:17.040645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:17.040916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:17.041241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:17.041404Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:17.041560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:17.041708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:17.041834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:17.041962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:17.042094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:17.042271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:17.042426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:17.042546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:17.042653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:17.081400Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:17.085115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:17.085222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:17.085422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:17.085593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:17.085689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:17.085740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:17.085860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:17.085946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:17.086011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:17.086058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:17.086253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:17.086321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:17.086364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:17.086395Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:17.086500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:17.086574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:17.086644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:17.086680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:17.086745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:17.086784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:17.086816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:17.086860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:17.086916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:17.086949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:17.087169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:17.087300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:17.087348Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:17.087487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:17.087531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:17.087561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:17.087613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:17.087658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:17.087695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:17.087751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:17.087808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:17.087858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:17.087998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:17.088076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:37:15.078377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1527; 2026-01-07T17:37:15.078443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=69045; 2026-01-07T17:37:15.078495Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=69176; 2026-01-07T17:37:15.078572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:37:15.078993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=368; 2026-01-07T17:37:15.079033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=70310; 2026-01-07T17:37:15.079226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=105; 2026-01-07T17:37:15.079366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2026-01-07T17:37:15.079785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=372; 2026-01-07T17:37:15.080155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=324; 2026-01-07T17:37:15.098836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18591; 2026-01-07T17:37:15.117625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18627; 2026-01-07T17:37:15.117760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-07T17:37:15.117824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:37:15.117865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:15.117941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2026-01-07T17:37:15.117984Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:37:15.118073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-07T17:37:15.118122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-07T17:37:15.118236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=46; 2026-01-07T17:37:15.118350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2026-01-07T17:37:15.118449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=53; 2026-01-07T17:37:15.118493Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=120634; 2026-01-07T17:37:15.118642Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:15.118764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:15.118830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:15.118904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:15.118955Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:15.119195Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:15.119271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:15.119313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:15.119359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:15.119465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807081128;tx_id=18446744073709551615;;current_snapshot_ts=1767807378518; 2026-01-07T17:37:15.119526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:15.119578Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:15.119623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:15.119735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:15.119954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.058000s; 2026-01-07T17:37:15.123293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:15.123794Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:15.123859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:15.123945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:15.124004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:15.124078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807081128;tx_id=18446744073709551615;;current_snapshot_ts=1767807378518; 2026-01-07T17:37:15.124134Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:15.124189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:15.124232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:15.124340Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:37:15.124396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:15.125224Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.018000s; 2026-01-07T17:37:15.125280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> BasicUsage::CreateTopicWithStreamingConsumer [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::ReadWithRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2026-01-07T17:36:02.075677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.108099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.108338Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:02.115839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.116096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.116317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.116442Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.116542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.116653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.116769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.116870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.117027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.117153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.117287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.117409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.117525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.147215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.147883Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.147944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.148128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.148289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.148360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.148401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.148510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.148606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.148654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.148685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.148870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.148975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.149025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.149060Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.149155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.149221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.149291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:02.149325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.149376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.149414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.149447Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:02.149494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.149531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.149560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.149792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.149914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.149949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.150106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.150155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.150187Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.150235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.150279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.150330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:02.150379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.150419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.150454Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.150577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:02.150645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... odicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:36:56.735557Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:56.735639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-07T17:36:56.735701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:36:56.738231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:56.738468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.738542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-07T17:36:56.738617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.739331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.739500Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.739685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.739784Z 
node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.626500s; 2026-01-07T17:36:56.739867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:56.740509Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.740843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-07T17:36:56.741229Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-07T17:36:56.741296Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.741473Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[183] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-07T17:36:56.742364Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.742915Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:36:56.757795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.757903Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-07T17:36:56.758318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::76902fb0-ebef11f0-bb3f4b6a-97270b04; 2026-01-07T17:36:56.758411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:36:56.758499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-07T17:36:56.758565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:36:56.758650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:36:56.758710Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.758766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:36:56.758878Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.625000s; 2026-01-07T17:36:56.758969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=76902fb0-ebef11f0-bb3f4b6a-97270b04;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:36:56.759085Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:6171112:0] 2026-01-07T17:36:56.759166Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:6171112:0] 2026-01-07T17:36:56.759229Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:6171112:0] 2026-01-07T17:36:56.759284Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:6171112:0] 2026-01-07T17:36:56.759324Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2026-01-07T17:36:56.759365Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:1:3:0:6171112:0] 2026-01-07T17:36:56.759405Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2026-01-07T17:36:56.759462Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:6171112:0] 2026-01-07T17:36:56.759508Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6171112:0] 2026-01-07T17:36:56.759548Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-07T17:36:56.759591Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:6171112:0] 2026-01-07T17:36:56.759631Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2026-01-07T17:36:56.759668Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:6171112:0] 2026-01-07T17:36:56.759706Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6171112:0] 2026-01-07T17:36:56.759752Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:6171112:0] 2026-01-07T17:36:56.759816Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:17:4:0:6171112:0] 2026-01-07T17:36:56.759863Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:6171112:0] 2026-01-07T17:36:56.759905Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6171112:0] 2026-01-07T17:36:56.759948Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-07T17:36:56.759997Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6171112:0] GC for channel 2 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2026-01-07T17:36:01.805019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:01.835457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:01.835684Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:01.842358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:01.842595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:01.842791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:01.842917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:01.843029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:01.843124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:01.843235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:01.843337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:01.843442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:01.843556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.843671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:01.843765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:01.843871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:01.872344Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:01.872979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:01.873044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:01.873230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.873402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:01.873468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:01.873507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:01.873601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:01.873665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:01.873705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:01.873732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:01.873887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:01.873942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:01.873994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:01.874025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:01.874121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:01.874180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:01.874245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:01.874276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:01.874321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:01.874378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:36:01.874411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:01.874451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:01.874487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:01.874514Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:01.874716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:01.874810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:01.874839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:01.874991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:01.875032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.875059Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:01.875102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:01.875162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:01.875200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:01.875247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:01.875281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:01.875323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-07T17:36:01.875449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:01.875516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7697; 2026-01-07T17:37:17.804661Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:37:17.805645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=907; 2026-01-07T17:37:17.805706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9021; 2026-01-07T17:37:17.805759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9154; 2026-01-07T17:37:17.805873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-07T17:37:17.805979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=59; 2026-01-07T17:37:17.806032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9851; 2026-01-07T17:37:17.806210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=123; 2026-01-07T17:37:17.806345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=91; 2026-01-07T17:37:17.806539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=149; 2026-01-07T17:37:17.806725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=138; 2026-01-07T17:37:17.809389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2614; 2026-01-07T17:37:17.812096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2628; 2026-01-07T17:37:17.812183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-07T17:37:17.812234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:37:17.812269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:37:17.812382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-07T17:37:17.812429Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:37:17.812521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2026-01-07T17:37:17.812554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:17.812613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-07T17:37:17.812722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=70; 2026-01-07T17:37:17.812832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=65; 2026-01-07T17:37:17.812876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25274; 2026-01-07T17:37:17.813068Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:17.813221Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:17.813284Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:17.813367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:17.813450Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:17.813639Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:17.813719Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:17.813763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:17.813822Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:17.813905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:17.813971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.814036Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.814155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:17.814473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.120000s; 2026-01-07T17:37:17.816641Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:17.817668Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:17.817741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:17.817841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:17.817891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:17.817951Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:17.818027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:17.818097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.818145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.818259Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:17.818334Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:17.818854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.099000s; 2026-01-07T17:37:17.818918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> Cdc::StringEscaping [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TxUsage::WriteToTopic_Demo_12_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2026-01-07T17:36:15.373920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:15.398743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:15.399015Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:15.405674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:15.405868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:15.406057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:15.406160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:15.406239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:15.406338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:15.406430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:15.406493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:15.406554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:15.406644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:15.406725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:15.406796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:15.406859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:15.441096Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:15.442870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:15.442957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:15.443168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:15.443396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:15.443476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:15.443542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:15.443670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:15.443743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:15.443788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:15.443831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:15.444039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:15.444106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:15.444153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:15.444182Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:15.444277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:15.444344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:15.444414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:15.444445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:15.444498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:15.444538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:36:15.444573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:15.444620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:15.444663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:15.444715Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:15.444987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:15.445138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:15.445183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:15.445353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:15.445404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:15.445440Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:15.445490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:15.445536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:15.445581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:15.445651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:15.445692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:15.445752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-07T17:36:15.445900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:15.445968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-07T17:37:17.946683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1287; 2026-01-07T17:37:17.946745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=55734; 2026-01-07T17:37:17.946787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=55838; 2026-01-07T17:37:17.946849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-07T17:37:17.947212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=319; 2026-01-07T17:37:17.947256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=56744; 2026-01-07T17:37:17.947419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=110; 2026-01-07T17:37:17.947532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2026-01-07T17:37:17.948047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=333; 2026-01-07T17:37:17.948400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=304; 2026-01-07T17:37:17.966108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17638; 2026-01-07T17:37:17.982446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16226; 2026-01-07T17:37:17.982561Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-07T17:37:17.982618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:37:17.982661Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:37:17.982737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2026-01-07T17:37:17.982780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:37:17.982862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-07T17:37:17.982930Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:37:17.982994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-07T17:37:17.983113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=67; 2026-01-07T17:37:17.983223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=56; 2026-01-07T17:37:17.983259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=99397; 2026-01-07T17:37:17.983400Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:17.983515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:17.983565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:17.983634Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:17.983681Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:17.983877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:17.983936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:17.983971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:17.984013Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:17.984095Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807079518;tx_id=18446744073709551615;;current_snapshot_ts=1767807376907; 2026-01-07T17:37:17.984142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:17.984185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.984219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.984315Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:17.984512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.193000s; 2026-01-07T17:37:17.987946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:17.988270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:17.988351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:17.988435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:17.988486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:17.988554Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807079518;tx_id=18446744073709551615;;current_snapshot_ts=1767807376907; 2026-01-07T17:37:17.988599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:17.988651Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.988693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:17.988792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:37:17.988854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:17.989679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.198000s; 2026-01-07T17:37:17.989730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_41_Table [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2026-01-07T17:36:51.769673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:51.807312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:51.807603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:51.816483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:51.816763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:51.817016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:51.817137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:51.817245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:51.817343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:51.817486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:51.817600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:51.817700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:51.817836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.817943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:51.818065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:51.818194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:51.855775Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:51.856457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:51.856549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:51.856730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:51.856895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:51.856994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:51.857047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:51.857177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:51.857284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:51.857331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:51.857362Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:51.857545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:51.857610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:51.857655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:51.857685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:51.857789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:51.857862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:51.857951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:51.858006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:51.858062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:51.858110Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:51.858144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:51.858186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:51.858234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:51.858270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:51.858518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:51.858645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:51.858682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:51.858829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:51.858878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.858907Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:51.858956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:51.859001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:51.859042Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:51.859121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:51.859188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:51.859243Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:51.859385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:51.859447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... RD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:37:20.735678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.735723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:37:20.735866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-07T17:37:20.735926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=timestamp; 2026-01-07T17:37:20.736207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2026-01-07T17:37:20.736366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.736506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.736657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.737215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:37:20.737390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.737525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.737785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:3596:5602] finished for tablet 9437184 2026-01-07T17:37:20.738339Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:3589:5595];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.101},{"events":["f_ack"],"t":0.102},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.104}],"full":{"a":29648983,"name":"_full_task","f":29648983,"d_finished":0,"c":0,"l":29753821,"d":104838},"events":[{"name":"bootstrap","f":29649983,"d_finished":1965,"c":1,"l":29651948,"d":1965},{"a":29753160,"name":"ack","f":29751415,"d_finished":1239,"c":1,"l":29752654,"d":1900},{"a":29753141,"name":"processing","f":29652882,"d_finished":16024,"c":14,"l":29752657,"d":16704},{"name":"ProduceResults","f":29651347,"d_finished":4782,"c":17,"l":29753537,"d":4782},{"a":29753542,"name":"Finish","f":29753542,"d_finished":0,"c":0,"l":29753821,"d":279},{"name":"task_result","f":29652924,"d_finished":14485,"c":13,"l":29750216,"d":14485}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.738433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:37:20.738928Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:3589:5595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.101},{"events":["f_ack"],"t":0.102},{"events":["l_ProduceResults","f_Finish"],"t":0.104},{"events":["l_ack","l_processing","l_Finish"],"t":0.105}],"full":{"a":29648983,"name":"_full_task","f":29648983,"d_finished":0,"c":0,"l":29754455,"d":105472},"events":[{"name":"bootstrap","f":29649983,"d_finished":1965,"c":1,"l":29651948,"d":1965},{"a":29753160,"name":"ack","f":29751415,"d_finished":1239,"c":1,"l":29752654,"d":2534},{"a":29753141,"name":"processing","f":29652882,"d_finished":16024,"c":14,"l":29752657,"d":17338},{"name":"ProduceResults","f":29651347,"d_finished":4782,"c":17,"l":29753537,"d":4782},{"a":29753542,"name":"Finish","f":29753542,"d_finished":0,"c":0,"l":29754455,"d":913},{"name":"task_result","f":29652924,"d_finished":14485,"c":13,"l":29750216,"d":14485}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:37:20.739024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:37:20.627955Z;index_granules=0;index_portions=12;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=16464;inserted_portions_bytes=14016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=30480;selected_rows=0; 2026-01-07T17:37:20.739069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:37:20.739238Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=84bb7cf2-ebef11f0-b3aef996-35ab1bae; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::TestAllocateAllByPieces >> 
TxUsage::WriteToTopic_Demo_12_Query |86.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxLocatorTest::TestImposibleSize [GOOD] >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> TxUsage::WriteToTopic_Demo_42_Table >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2026-01-07T17:37:24.471745Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:37:24.472242Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:37:24.472975Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:37:24.474868Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.475378Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:37:24.486878Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.486987Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.487044Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.487176Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:37:24.487288Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.487367Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:37:24.487556Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:37:24.488346Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710656 2026-01-07T17:37:24.488515Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2026-01-07T17:37:24.493625Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-07T17:37:24.494178Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#123456 2026-01-07T17:37:24.494739Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.494822Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.494935Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2026-01-07T17:37:24.494973Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2026-01-07T17:37:24.495353Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2114] requested range size#281474976587200 2026-01-07T17:37:24.495499Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2026-01-07T17:37:24.495537Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:80:2114] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-07T17:37:24.495919Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#246912 2026-01-07T17:37:24.496325Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.496398Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.496512Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2026-01-07T17:37:24.496564Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2026-01-07T17:37:24.496986Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#281474976340288 2026-01-07T17:37:24.497094Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2026-01-07T17:37:24.497132Z node 1 :TX_ALLOCATOR ERROR: 
txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2026-01-07T17:37:24.483341Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:37:24.483933Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:37:24.484779Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:37:24.486909Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.487445Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:37:24.498504Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.498613Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.498679Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.498780Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:37:24.498883Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.498969Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:37:24.499128Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:37:24.499850Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#8796093022207 2026-01-07T17:37:24.500356Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.500430Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.500544Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2026-01-07T17:37:24.500585Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2026-01-07T17:37:24.521035Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2026-01-07T17:37:24.521770Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.521876Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.522002Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2026-01-07T17:37:24.522050Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2026-01-07T17:37:24.522492Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2026-01-07T17:37:24.522992Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.523068Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.523196Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2026-01-07T17:37:24.523262Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2026-01-07T17:37:24.523711Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2026-01-07T17:37:24.524134Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.524235Z node 1 :TABLET_MAIN DEBUG: 
tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.524322Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2026-01-07T17:37:24.524359Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2026-01-07T17:37:24.524814Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2026-01-07T17:37:24.525191Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.525255Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.525359Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2026-01-07T17:37:24.525409Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2026-01-07T17:37:24.525818Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2026-01-07T17:37:24.526198Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.526264Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.526350Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2026-01-07T17:37:24.526395Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2026-01-07T17:37:24.526840Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2026-01-07T17:37:24.527217Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.527353Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.527506Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2026-01-07T17:37:24.527563Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:97:2131] 
TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2026-01-07T17:37:24.527997Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2026-01-07T17:37:24.528367Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.528509Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.528657Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2026-01-07T17:37:24.528718Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2026-01-07T17:37:24.529260Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2026-01-07T17:37:24.529634Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.529704Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.529796Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2026-01-07T17:37:24.529864Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2026-01-07T17:37:24.530360Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2026-01-07T17:37:24.530722Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.530801Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.530891Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Suc ... 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2026-01-07T17:37:24.545675Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2026-01-07T17:37:24.546015Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.546099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.546193Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2026-01-07T17:37:24.546237Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2026-01-07T17:37:24.546953Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2026-01-07T17:37:24.547299Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.547379Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.547473Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2026-01-07T17:37:24.547516Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2026-01-07T17:37:24.548244Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2026-01-07T17:37:24.548576Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.548655Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.548744Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2026-01-07T17:37:24.548780Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2026-01-07T17:37:24.549599Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2026-01-07T17:37:24.549997Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046447617:2:28:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.550087Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.550179Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2026-01-07T17:37:24.550222Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2026-01-07T17:37:24.550970Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2026-01-07T17:37:24.551284Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.551395Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.551507Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2026-01-07T17:37:24.551548Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2026-01-07T17:37:24.552338Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2026-01-07T17:37:24.552688Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.552759Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.552884Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2026-01-07T17:37:24.552946Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2026-01-07T17:37:24.553730Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2026-01-07T17:37:24.554071Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.554121Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.554220Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 
246290604621796 Reserved to# 255086697644003 2026-01-07T17:37:24.554262Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2026-01-07T17:37:24.555025Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2026-01-07T17:37:24.555808Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.555892Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.556125Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2026-01-07T17:37:24.556224Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2026-01-07T17:37:24.557185Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2026-01-07T17:37:24.557593Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.557687Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.557821Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2026-01-07T17:37:24.557880Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2026-01-07T17:37:24.564817Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#8796093022207 2026-01-07T17:37:24.565658Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.565746Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.565879Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2026-01-07T17:37:24.565924Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2026-01-07T17:37:24.567320Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#31 2026-01-07T17:37:24.567973Z node 1 :TABLET_MAIN 
DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.568122Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:37:24.568294Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2026-01-07T17:37:24.568354Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2026-01-07T17:37:24.569455Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:205:2239] requested range size#1 2026-01-07T17:37:24.569657Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-07T17:37:24.569717Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:205:2239] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> TTopicApiDescribes::GetPartitionDescribe |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTopicApiDescribes::GetLocalDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: 2026-01-07T17:36:28.169269Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:36:28.254957Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:28.255061Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:28.255133Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:28.255198Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:36:28.282290Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:28.303381Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:36:28.304579Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:36:28.308037Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:36:28.311463Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:36:28.313372Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:36:28.321318Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:28.321770Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|947d51ac-9822d780-968d18f5-6f6d57f4_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:28.340800Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:28.341346Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|584b70c2-8ab8144f-9d721407-aa8092e8_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:28.382379Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:28.382912Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ed427985-39904504-9bdeb997-5d508adf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:245:2057] recipient: [1:104:2137] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:248:2057] recipient: [1:247:2242] Leader for TabletID 72057594037927937 is [1:249:2243] sender: [1:250:2057] recipient: [1:247:2242] 2026-01-07T17:36:28.477023Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:28.477080Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:28.478188Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:28.478247Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:36:28.478962Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:298:2243] 2026-01-07T17:36:28.481258Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:299:2243] 2026-01-07T17:36:28.488205Z node 1 :PERSQUEUE INFO: 
partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:36:28.488611Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:36:28.488953Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:298:2243] 2026-01-07T17:36:28.489191Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:299:2243] 2026-01-07T17:36:28.497196Z node 1 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-07T17:36:28.499869Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:249:2243] sender: [1:327:2057] recipient: [1:14:2061] Got start offset = 0 2026-01-07T17:36:29.056835Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-07T17:36:29.120539Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:29.120621Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:29.120669Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:29.120725Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] 2026-01-07T17:36:29.146120Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:29.147024Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" 
ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-07T17:36:29.147798Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:188:2142] 2026-01-07T17:36:29.150552Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:188:2142] 2026-01-07T17:36:29.152337Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:189:2142] 2026-01-07T17:36:29.155696Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:189:2142] 2026-01-07T17:36:29.169295Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:29.169794Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f2fce0b5-2d6336d5-8ed7bae7-f29dc3e1_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:29.194662Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:29.195199Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|43db4cc-99a18848-178213b5-8c20cc99_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:29.239518Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:29.240040Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7d3ba247-2ac446fb-1ff0ee70-b404ed5b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:242:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:245:2057] recipient: [2:244:2239] Leader for TabletID 72057594037927937 is [2:246:2240] sender: [2:247:2057] recipient: [2:244:2239] 2026-01-07T17:36:29.309146Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:29.309216Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:29.310129Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:29.310190Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01 ... 
057594037927937] Config applied version 55 actor [55:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 } 2026-01-07T17:37:23.884193Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:188:2142] 2026-01-07T17:37:23.887303Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:188:2142] 2026-01-07T17:37:23.889451Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:189:2142] 2026-01-07T17:37:23.891882Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:189:2142] 2026-01-07T17:37:23.908145Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:23.908605Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|79b59818-2851aa94-f10ddb68-f416c0eb_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:23.950184Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:23.950840Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ecefa526-c7a71fd1-401f13a0-b08c55fc_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:24.003904Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:24.004535Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a980e019-e2c6c2-87e9e421-c5f778b7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [55:112:2142] sender: [55:243:2057] recipient: [55:104:2137] Leader for TabletID 72057594037927937 is [55:112:2142] sender: [55:246:2057] recipient: [55:245:2240] Leader for TabletID 72057594037927937 is [55:247:2241] sender: [55:248:2057] recipient: [55:245:2240] 2026-01-07T17:37:24.124069Z node 55 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:37:24.124150Z node 55 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:37:24.125058Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:24.125116Z node 55 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:37:24.126409Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:296:2241] 2026-01-07T17:37:24.129664Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:297:2241] 2026-01-07T17:37:24.141993Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:24.142562Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:24.143329Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:296:2241] 2026-01-07T17:37:24.143665Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:297:2241] 2026-01-07T17:37:24.158778Z node 55 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-07T17:37:24.159381Z node 55 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 55 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [55:247:2241] sender: [55:325:2057] recipient: [55:14:2061] Got start offset = 0 2026-01-07T17:37:24.705849Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:108:2057] recipient: [56:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:108:2057] recipient: [56:106:2138] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:113:2057] recipient: [56:106:2138] 2026-01-07T17:37:24.799404Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:37:24.799468Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:37:24.799516Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:24.799571Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:154:2057] recipient: [56:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:154:2057] recipient: [56:152:2172] Leader for TabletID 72057594037927938 is [56:158:2176] sender: [56:159:2057] recipient: [56:152:2172] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:182:2057] recipient: [56:14:2061] 2026-01-07T17:37:24.820724Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:24.821696Z node 56 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 56 actor [56:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 } 2026-01-07T17:37:24.822424Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:188:2142] 2026-01-07T17:37:24.825402Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [56:188:2142] 2026-01-07T17:37:24.827526Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:189:2142] 2026-01-07T17:37:24.829779Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [56:189:2142] 2026-01-07T17:37:24.837947Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:24.838342Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|46c4f138-c5f3268c-8e7c8-74b52624_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:24.857901Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:24.858484Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|78dfb645-c394a12e-4b12dd9a-e80d592f_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:24.907177Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:24.907747Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fd94082a-64d4f4c2-d408e774-81d0ba7f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:243:2057] recipient: [56:104:2137] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:246:2057] recipient: [56:245:2240] Leader for TabletID 72057594037927937 is [56:247:2241] sender: [56:248:2057] recipient: [56:245:2240] 2026-01-07T17:37:25.007337Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:37:25.007507Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:37:25.008524Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:25.008581Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:37:25.009817Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:296:2241] 2026-01-07T17:37:25.012561Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:297:2241] 2026-01-07T17:37:25.023846Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: 
[rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:25.024402Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:25.025407Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [56:296:2241] 2026-01-07T17:37:25.025800Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [56:297:2241] 2026-01-07T17:37:25.037728Z node 56 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-07T17:37:25.041296Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [56:247:2241] sender: [56:325:2057] recipient: [56:14:2061] Got start offset = 0 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2026-01-07T17:36:02.115734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:02.139531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:02.139735Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:02.145909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:02.146127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:02.146376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:02.146518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:02.146621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:02.146735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:02.146851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:02.146961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:02.147082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:02.147213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.147352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:02.147476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:02.147579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:02.171277Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:02.171822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:02.171891Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:02.172056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.172192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:02.172247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:02.172278Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:02.172362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:02.172423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:02.172465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:02.172488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:02.172621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:02.172679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:02.172725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:02.172748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:02.172811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:02.172856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:02.172894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:02.172921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:02.172994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:02.173026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:02.173051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:02.173081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:02.173105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:02.173124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:02.173283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:02.173441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:02.173487Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:02.173598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:02.173628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.173650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:02.173693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:02.173727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:02.173753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:02.173800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:02.173836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:02.173858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:02.173966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:02.174018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7413; 2026-01-07T17:37:20.914060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-07T17:37:20.915051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=935; 2026-01-07T17:37:20.915113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8738; 2026-01-07T17:37:20.915159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8888; 2026-01-07T17:37:20.915222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-07T17:37:20.915313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=49; 2026-01-07T17:37:20.915360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9649; 2026-01-07T17:37:20.915546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=126; 2026-01-07T17:37:20.915687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2026-01-07T17:37:20.915921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=184; 2026-01-07T17:37:20.916129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=155; 2026-01-07T17:37:20.919995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3798; 2026-01-07T17:37:20.923509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3408; 2026-01-07T17:37:20.923619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-01-07T17:37:20.923679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:37:20.923749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-07T17:37:20.923852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=59; 2026-01-07T17:37:20.923903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:37:20.924013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=69; 2026-01-07T17:37:20.924061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:37:20.924154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=49; 2026-01-07T17:37:20.924272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=68; 2026-01-07T17:37:20.924397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=66; 2026-01-07T17:37:20.924453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=34557; 2026-01-07T17:37:20.924621Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:20.924758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:20.924854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:20.924997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:20.925070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:20.925228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:20.925330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:20.925383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:20.925441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:20.925518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:20.925603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:20.925657Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:20.925770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:20.926019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.037000s; 2026-01-07T17:37:20.928390Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:20.929796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:20.929881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:20.929977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:20.930038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:20.930103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:20.930185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:20.930255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:20.930309Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:20.930402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:20.930470Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:20.931251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.171000s; 2026-01-07T17:37:20.931313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TTopicApiDescribes::DescribeTopic |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> KqpTx::MixEnginesOldNew [GOOD] >> TableCreator::CreateTables |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> Cdc::DropColumn [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> Cdc::DropIndex >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> RetryPolicy::TWriteSession_SeqNoShift |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |86.9%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> 
TxUsage::WriteToTopic_Demo_21_RestartNo_Query |86.8%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> TableCreator::CreateTables [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> DataShardSnapshots::VolatileSnapshotTimeout |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 20151, MsgBus: 14314 2026-01-07T17:36:58.490664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675046667996673:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:58.490868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:36:58.565208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:59.088384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:59.095935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:59.096064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:59.269409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:59.301044Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675046667996539:2081] 1767807418457184 != 1767807418457187 2026-01-07T17:36:59.343302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:59.348157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:59.512232Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:59.633391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:36:59.633413Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:36:59.633419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:36:59.633758Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:00.581158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:00.609480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:37:00.741347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:01.033048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:01.240544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:01.336832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:03.491211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675046667996673:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:03.491288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:03.617940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675068142834917:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:03.618128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:03.628795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675068142834927:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:03.629004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.093916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.143892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.186404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.234901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.287096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.384645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.493148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.602214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:04.805195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675072437803098:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.805349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.807038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675072437803103:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.807095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675072437803104:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.807162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:04.812113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:04.845564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675072437803107:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:37:04.943849Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675072437803158:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... 44480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:17.188664Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Zjc4N2MyODYtZDljNjE5MGUtODY4MjJiZTYtODI4ZTgxZDA=, ActorId: [2:7592675131304570057:2535], ActorState: ReadyState, LegacyTraceId: 01kecrhjc4cbesp8dmqha6cm4n, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecrhjax69mzcfb42kf6qnds" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 7924, MsgBus: 1406 2026-01-07T17:37:18.388424Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675134489183530:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:18.389013Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:18.411414Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:18.535578Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:18.535670Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:18.540371Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:18.565946Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:18.677169Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:18.719389Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:37:18.719414Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:37:18.719425Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:37:18.719507Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:19.206486Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:19.232768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:19.311358Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:19.551344Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:19.754035Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:19.876645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.502190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592675151669054497:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.502295Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.502649Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592675151669054507:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.502697Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.591964Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.640062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.685665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.731585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.801278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.864810Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.952461Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.042441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.152954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592675155964022676:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.153053Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.155466Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592675155964022681:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.155529Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592675155964022682:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.155577Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.160703Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:23.176536Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592675155964022685:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:37:23.232536Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592675155964022736:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:23.385181Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592675134489183530:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:23.385260Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2026-01-07T17:37:27.212362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675173960837884:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:27.212450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:27.252827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:27.527157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:27.527256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:27.528874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:27.541413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:37:27.598413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:27.599594Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675173960837672:2081] 1767807447164111 != 1767807447164114 2026-01-07T17:37:27.806632Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:37:27.896958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:37:27.896997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:37:27.897006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-07T17:37:27.897104Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:27.954679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:27.981249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:27.984002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:28.213351Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2026-01-07T17:36:30.175443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.205550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.205757Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.212688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.212906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.213127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.213235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.213328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.213456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.213586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.213685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.213782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.213880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.214030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.214124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.214227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.250890Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.252253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.252339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.252521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.252678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.252744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.252784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.252899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.252979Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.253020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.253049Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.253219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.253280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.253438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.253483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.253586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.253655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.253707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.253734Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.253787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.253821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.253851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.253906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.253938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.253962Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.254163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.254268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.254300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.254419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.254482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.254523Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.254577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.254618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.254652Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.254707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.254744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.254771Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.254933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.255006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ame=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-07T17:37:23.766220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1767; 2026-01-07T17:37:23.766290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=255491; 2026-01-07T17:37:23.766338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=255625; 2026-01-07T17:37:23.766422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-07T17:37:23.766854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=377; 2026-01-07T17:37:23.766907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=256750; 2026-01-07T17:37:23.767104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=133; 2026-01-07T17:37:23.767251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2026-01-07T17:37:23.767735Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=428; 2026-01-07T17:37:23.768095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=304; 2026-01-07T17:37:23.790471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=22284; 2026-01-07T17:37:23.819708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=29073; 2026-01-07T17:37:23.819867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-01-07T17:37:23.819936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:37:23.819981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:23.820081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=61; 2026-01-07T17:37:23.820131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:37:23.820280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=68; 2026-01-07T17:37:23.820326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:37:23.820428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=49; 2026-01-07T17:37:23.820558Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=71; 2026-01-07T17:37:23.820668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=69; 2026-01-07T17:37:23.820731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=320314; 2026-01-07T17:37:23.820900Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:23.821052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:23.821134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:23.821218Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:23.821313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:23.821490Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:23.821574Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:23.821613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-07T17:37:23.821663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:23.821735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807092621;tx_id=18446744073709551615;;current_snapshot_ts=1767807391644; 2026-01-07T17:37:23.821779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:23.821827Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:23.821873Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:23.821979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:23.822200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.039000s; 2026-01-07T17:37:23.827822Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:23.828302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:23.828373Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:23.828477Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:23.828539Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:23.828618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807092621;tx_id=18446744073709551615;;current_snapshot_ts=1767807391644; 2026-01-07T17:37:23.828674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:23.828758Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:23.828807Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:23.828896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:23.828964Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:23.830078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.061000s; 2026-01-07T17:37:23.830131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2026-01-07T17:36:41.561862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:41.597844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:41.598110Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:41.607782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:41.608069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:41.608446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:41.608582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:41.608682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:41.608837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:41.608986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:41.609127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:41.609244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:41.609357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:41.609511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:41.609622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:41.609723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:41.643981Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:41.644762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:41.644829Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:41.645040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:41.645204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:41.645273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:41.645320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:41.645434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:41.645538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:41.645606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:41.645641Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:41.645860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:41.645935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:41.645981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:41.646020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:41.646125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:41.646200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:41.646269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:41.646300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:41.646356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:41.646391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:41.646419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:41.646462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:41.646503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:41.646533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:41.646812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:41.646941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:41.646981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:41.647141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:41.647188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:41.647231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:41.647294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:41.647345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:41.647384Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:41.647439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:41.647478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:41.647510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:41.647663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:41.647731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:37:28.174904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1026; 2026-01-07T17:37:28.174975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=61610; 2026-01-07T17:37:28.175028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=61736; 2026-01-07T17:37:28.175104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:37:28.175530Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=372; 2026-01-07T17:37:28.175580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62708; 2026-01-07T17:37:28.175758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=118; 2026-01-07T17:37:28.176144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=101; 2026-01-07T17:37:28.176744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=528; 2026-01-07T17:37:28.177242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=415; 2026-01-07T17:37:28.216150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=38801; 2026-01-07T17:37:28.236734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=20402; 2026-01-07T17:37:28.236901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2026-01-07T17:37:28.237037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=16; 2026-01-07T17:37:28.237083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:28.237171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-07T17:37:28.237217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:37:28.237308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-07T17:37:28.237352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:28.237453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=45; 2026-01-07T17:37:28.237593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2026-01-07T17:37:28.237700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=66; 2026-01-07T17:37:28.237762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=133140; 2026-01-07T17:37:28.237946Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:28.238081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:28.238147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:28.238226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:28.238283Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:28.238513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:28.238592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:28.238637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:28.238693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:28.238799Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807104020;tx_id=18446744073709551615;;current_snapshot_ts=1767807403033; 2026-01-07T17:37:28.238865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:28.238917Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.238983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.239112Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:28.239341Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.196000s; 2026-01-07T17:37:28.245305Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:28.245970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:28.246037Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:28.246135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:28.246190Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:28.246286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807104020;tx_id=18446744073709551615;;current_snapshot_ts=1767807403033; 2026-01-07T17:37:28.246364Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:28.246425Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.246468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.246550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:28.246618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:28.247854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.004000s; 2026-01-07T17:37:28.247902Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> BasicUsage::ReadWithRestarts [GOOD] >> Describe::LocationWithKillTablets >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2026-01-07T17:36:43.160180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:43.198908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:43.199211Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:43.206904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:43.207159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:43.207395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:43.207520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:43.207671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:43.207782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:43.207900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:43.208012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:43.208112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:43.208227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:43.208401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:43.208513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:43.208613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:43.242593Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:43.244277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:43.244361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:43.244577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:43.244769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:43.244863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:43.244913Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:43.245061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:43.245142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:43.245190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:43.245253Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:43.245456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:43.245527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:43.245599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:43.245634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:43.245751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:43.245843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:43.245913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:43.245944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:43.245999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:43.246050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:43.246083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-07T17:36:43.246129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:43.246173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:43.246203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:43.246440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:43.246613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:43.246660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:43.246812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:43.246880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:43.246912Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:43.246960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:43.247004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:43.247046Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:43.247105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:43.247151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:43.247185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:43.247313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-07T17:36:43.247389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-07T17:37:28.588144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1048; 2026-01-07T17:37:28.588204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=74615; 2026-01-07T17:37:28.588260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=74746; 2026-01-07T17:37:28.588338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-07T17:37:28.588759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=368; 2026-01-07T17:37:28.588803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=75803; 2026-01-07T17:37:28.589003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=137; 2026-01-07T17:37:28.589139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=79; 2026-01-07T17:37:28.589572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=379; 2026-01-07T17:37:28.589937Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=313; 2026-01-07T17:37:28.604304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14279; 2026-01-07T17:37:28.618313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=13855; 2026-01-07T17:37:28.618444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-07T17:37:28.618506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:37:28.618550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:37:28.618627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2026-01-07T17:37:28.618671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:37:28.618759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-07T17:37:28.618800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:37:28.618869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-01-07T17:37:28.618961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-07T17:37:28.619042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-07T17:37:28.619081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=115058; 2026-01-07T17:37:28.619254Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:28.619381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:28.619442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:28.619517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:28.619564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:28.619779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:28.619839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:28.619880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:28.619926Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:28.619990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807105558;tx_id=18446744073709551615;;current_snapshot_ts=1767807404623; 2026-01-07T17:37:28.620032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:28.620078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.620113Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.620200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:28.620388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.109000s; 2026-01-07T17:37:28.625004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:28.625380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:28.625443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:28.625544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:28.625604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:28.625683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807105558;tx_id=18446744073709551615;;current_snapshot_ts=1767807404623; 2026-01-07T17:37:28.625738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:28.625791Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.625837Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:28.625924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:28.626004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:28.627001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.178000s; 2026-01-07T17:37:28.627054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |87.0%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |87.0%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names >> TIcNodeCache::GetNodesInfoTest >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD] >> TTopicApiDescribes::DescribeConsumer >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TOlapNaming::CreateColumnStoreFailed >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TxUsage::WriteToTopic_Demo_12_Query [GOOD] >> TOlap::StoreStatsQuota >> TOlapNaming::AlterColumnTableFailed >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query >> TOlapNaming::CreateColumnTableOk >> TxUsage::WriteToTopic_Demo_13_Table >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> TOlap::CreateDropStandaloneTable >> TOlap::CreateStoreWithDirs >> TOlap::StoreStats >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2026-01-07T17:36:30.112710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.148595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.148859Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.156720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.157012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.157238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.157398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.157542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.157658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.157775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.157915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.158052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.158205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.158334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.158452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.158592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.193162Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.193960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.194023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.194226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.194402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.194470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.194513Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.194618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.194693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.194743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.194778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.194951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.195012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.195056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.195084Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.195173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.195227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.195286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.195313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.195361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.195398Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.195442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.195496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.195544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.195584Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.195804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.195909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.195939Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.196052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.196090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.196147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.196196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.196237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.196271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.196337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.196374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.196420Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.196590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.196647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-07T17:37:33.819710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=998; 2026-01-07T17:37:33.819768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=72351; 2026-01-07T17:37:33.819820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=72483; 2026-01-07T17:37:33.819893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:37:33.820273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=324; 2026-01-07T17:37:33.820325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=73503; 2026-01-07T17:37:33.820496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2026-01-07T17:37:33.820630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=81; 2026-01-07T17:37:33.821055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=371; 2026-01-07T17:37:33.821430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=318; 2026-01-07T17:37:33.840477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18947; 2026-01-07T17:37:33.856707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16090; 2026-01-07T17:37:33.856832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-07T17:37:33.856890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-07T17:37:33.857799Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=850; 2026-01-07T17:37:33.857931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=62; 2026-01-07T17:37:33.857989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2026-01-07T17:37:33.858077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2026-01-07T17:37:33.858120Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:37:33.858193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-07T17:37:33.858285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-07T17:37:33.858367Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-07T17:37:33.858406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=119088; 2026-01-07T17:37:33.858556Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:33.858667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:33.858724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:33.858790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:33.858836Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:33.859040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:33.859105Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:33.859145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:33.859191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:33.859255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807094244;tx_id=18446744073709551615;;current_snapshot_ts=1767807391645; 2026-01-07T17:37:33.859298Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:33.859345Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:33.859378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:33.859465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:33.859651Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.182000s; 2026-01-07T17:37:33.862898Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:33.863125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:33.863177Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:33.863255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:33.863307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-07T17:37:33.863371Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807094244;tx_id=18446744073709551615;;current_snapshot_ts=1767807391645; 2026-01-07T17:37:33.863420Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:33.863468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:33.863510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:33.863590Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-07T17:37:33.863638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:33.864405Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.031000s; 2026-01-07T17:37:33.864471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> TOlap::CreateTableWithNullableKeysNotAllowed >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2026-01-07T17:37:26.225078Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675167796925240:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:26.225295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:26.297972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:26.317338Z node 1 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:26.364257Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:26.677088Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:26.684245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:37:26.758112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:26.811757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:26.811857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:26.817187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:26.817259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:26.832753Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:37:26.832919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:26.851848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:26.927452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:26.945419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:26.952338Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:27.193585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0030a5/r3tmp/yandexjue7hk.tmp 2026-01-07T17:37:27.193612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0030a5/r3tmp/yandexjue7hk.tmp 2026-01-07T17:37:27.193769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0030a5/r3tmp/yandexjue7hk.tmp 2026-01-07T17:37:27.193839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:27.241099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:27.348654Z INFO: TTestServer started on Port 25346 GrpcPort 3698 
2026-01-07T17:37:27.361886Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:3698 2026-01-07T17:37:27.628669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:27.834773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:30.378595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675184976795694:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.378728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.379239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675184976795722:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.379286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675184976795723:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.379458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.383850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:30.391213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675184976795758:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.391398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.392946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675184976795768:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.393018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.419431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675184976795726:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:30.657114Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675184976795814:3051] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:30.703589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:30.708168Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675186429000880:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:30.711025Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MzNlZWE1ZTQtZGNmZGZiZDYtNjM0YWY1YzEtYjA0OGU0MTk=, ActorId: [2:7592675186429000840:2304], ActorState: ExecuteState, LegacyTraceId: 01kecrhz9p9pd1ch2n2gh9krvn, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:30.709060Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675184976795833:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:30.711438Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDBjMmU3ZDctMzY5NmM4Y2YtZTI2YmYzZjQtYzczNjBlYjA=, ActorId: [1:7592675184976795691:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrhz87bfjpgycjtfwhze6p, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check ... ateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7592675215041568154:2466] 2026-01-07T17:37:37.849686Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7592675216493772430:2362] 2026-01-07T17:37:37.854203Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037893][Partition][1][StateInit] bootstrapping 1 [1:7592675215041568158:2467] 2026-01-07T17:37:37.860422Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][9][StateInit] bootstrapping 9 [2:7592675216493772450:2363] 2026-01-07T17:37:37.860776Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7592675216493772452:2360] 2026-01-07T17:37:37.857531Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037899][Partition][4][StateInit] bootstrapping 4 [1:7592675215041568169:2468] 2026-01-07T17:37:37.859460Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037899][Partition][4][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7592675215041568169:2468] 2026-01-07T17:37:37.860098Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7592675215041568158:2467] 2026-01-07T17:37:37.861612Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037893][Partition][6][StateInit] bootstrapping 6 [1:7592675215041568159:2467] 2026-01-07T17:37:37.863399Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7592675215041568159:2467] 2026-01-07T17:37:37.862466Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7592675216493772450:2363] 2026-01-07T17:37:37.862663Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7592675216493772452:2360] 2026-01-07T17:37:37.865097Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7592675216493772454:2363] 2026-01-07T17:37:37.866325Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7592675216493772457:2360] 2026-01-07T17:37:37.868578Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 
72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.868026Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7592675216493772454:2363] 2026-01-07T17:37:37.868197Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7592675216493772457:2360] 2026-01-07T17:37:37.874845Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.876069Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][5][StateInit] bootstrapping 5 [1:7592675215041568180:2469] 2026-01-07T17:37:37.876234Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.878258Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7592675215041568180:2469] 2026-01-07T17:37:37.879876Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][10][StateInit] bootstrapping 10 [1:7592675215041568187:2469] 2026-01-07T17:37:37.881847Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7592675215041568187:2469] 2026-01-07T17:37:37.888874Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.888910Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.890111Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.889620Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.893746Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig ===Query complete Create topic result: 1 2026-01-07T17:37:37.915933Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675215041568254:4029]: Request location 2026-01-07T17:37:37.917163Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675215041568254:4029]: Got location 2026-01-07T17:37:37.918106Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675215041568264:4033]: Request location 2026-01-07T17:37:37.916526Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568263:4032] connected; active server actors: 1 2026-01-07T17:37:37.916803Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 1 2026-01-07T17:37:37.916820Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to 
response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-07T17:37:37.916832Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 1 2026-01-07T17:37:37.916846Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-07T17:37:37.916856Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 1 2026-01-07T17:37:37.916869Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-07T17:37:37.916883Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 1 2026-01-07T17:37:37.916913Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 1 2026-01-07T17:37:37.916944Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 1 2026-01-07T17:37:37.916954Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 1 2026-01-07T17:37:37.916964Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 1 2026-01-07T17:37:37.916976Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 1 2026-01-07T17:37:37.916987Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 1 2026-01-07T17:37:37.916998Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 1 2026-01-07T17:37:37.917009Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 1 2026-01-07T17:37:37.917526Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568263:4032] disconnected. 
2026-01-07T17:37:37.917547Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568263:4032] disconnected; active server actors: 1 2026-01-07T17:37:37.917567Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568263:4032] disconnected no session 2026-01-07T17:37:37.918450Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568266:4035] connected; active server actors: 1 2026-01-07T17:37:37.918800Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-07T17:37:37.918817Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-07T17:37:37.918828Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-07T17:37:37.919403Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568266:4035] disconnected. 2026-01-07T17:37:37.918948Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675215041568264:4033]: Got location 2026-01-07T17:37:37.919430Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568266:4035] disconnected; active server actors: 1 2026-01-07T17:37:37.919443Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568266:4035] disconnected no session 2026-01-07T17:37:37.920183Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675215041568269:4038] connected; active server actors: 1 2026-01-07T17:37:37.919776Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675215041568267:4036]: Request location |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> TTopicApiDescribes::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2026-01-07T17:37:26.082954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675168681577110:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:26.083030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:26.200097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:26.265279Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:26.323004Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:26.573004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:26.573172Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:37:26.577841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:26.675368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:26.675508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:26.680346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:26.680416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:26.729741Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:37:26.729925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:26.730815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:26.763678Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:26.820825Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:26.934018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:26.984275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00309b/r3tmp/yandexCD3Fcf.tmp 2026-01-07T17:37:26.984337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00309b/r3tmp/yandexCD3Fcf.tmp 2026-01-07T17:37:26.984519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00309b/r3tmp/yandexCD3Fcf.tmp 2026-01-07T17:37:26.984630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:27.070124Z INFO: TTestServer started on Port 62339 GrpcPort 24592 2026-01-07T17:37:27.153562Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:24592 2026-01-07T17:37:27.309770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:27.319901Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:27.463877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:30.094229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675185861447705:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.094554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.095653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675185861447718:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.095705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675185861447719:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.102982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:30.108012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:30.127582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675185861447724:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:30.196395Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675185861447807:3060] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:30.607576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:30.636351Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675185861447817:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:30.643811Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675187618865655:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:30.645649Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmUwMDVmMmMtMzA0ZDI1Y2QtOWIwNjY0YWYtNjllNjE0Yzk=, ActorId: [1:7592675185861447703:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrhyyv5g3hher4d5t247cr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:30.645513Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODY4OGU1Yi0xODFkMzFhZi1hMDY3MmM0Mi1hMWI2MTM3NQ==, ActorId: [2:7592675187618865621:2303], ActorState: ExecuteState, LegacyTraceId: 01kecrhz4h8nty76fw9mzrzpdq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:30.653382Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:30.653164Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/ ... 
RSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7592675215926220627:2482] 2026-01-07T17:37:37.530090Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7592675215926220628:2482] 2026-01-07T17:37:37.532171Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.532276Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.532524Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][3][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7592675217683637468:2371] 2026-01-07T17:37:37.532688Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7592675217683637469:2371] 2026-01-07T17:37:37.533372Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.533462Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.533741Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7592675215926220667:2485] 2026-01-07T17:37:37.533865Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7592675215926220669:2485] 2026-01-07T17:37:37.534564Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2026-01-07T17:37:37.537037Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.537113Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-07T17:37:37.537339Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7592675217683637521:2372] 2026-01-07T17:37:37.537477Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7592675217683637520:2372] 2026-01-07T17:37:37.538647Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2026-01-07T17:37:37.538668Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2026-01-07T17:37:37.541252Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2026-01-07T17:37:37.544748Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.544748Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:37.544769Z node 2 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-07T17:37:37.544774Z node 2 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037897] has a tx writes info 2026-01-07T17:37:37.545458Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7592675217683637601:2373] 2026-01-07T17:37:37.545477Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][8][StateInit] bootstrapping 8 [2:7592675217683637600:2374] 2026-01-07T17:37:37.547905Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [2:7592675217683637602:2374] 2026-01-07T17:37:37.547906Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7592675217683637603:2373] 2026-01-07T17:37:37.556019Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.556282Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.556586Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7592675217683637600:2374] 2026-01-07T17:37:37.556731Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7592675217683637602:2374] 2026-01-07T17:37:37.558283Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2026-01-07T17:37:37.558618Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-07T17:37:37.558713Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:37:37.558920Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7592675217683637601:2373] 2026-01-07T17:37:37.559061Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7592675217683637603:2373] 2026-01-07T17:37:37.559581Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2026-01-07T17:37:38.519081Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-07T17:37:38.519198Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2026-01-07T17:37:38.519236Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7592675220221188127:2502]: Bootstrap 2026-01-07T17:37:38.519849Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675220221188127:2502]: Request location 2026-01-07T17:37:38.521745Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675220221188127:2502]: Got location 2026-01-07T17:37:38.521461Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188129:2503] connected; active server actors: 1 2026-01-07T17:37:38.521561Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 Got response: 2026-01-07T17:37:38.522824Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188129:2503] disconnected. 
2026-01-07T17:37:38.522865Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188129:2503] disconnected; active server actors: 1 2026-01-07T17:37:38.522892Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188129:2503] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-01-07T17:37:38.524797Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-07T17:37:38.524896Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2026-01-07T17:37:38.524948Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7592675220221188130:2504]: Bootstrap 2026-01-07T17:37:38.525673Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675220221188130:2504]: Request location 2026-01-07T17:37:38.527733Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188133:2506] connected; active server actors: 1 2026-01-07T17:37:38.527837Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-07T17:37:38.528394Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675220221188130:2504]: Got location 2026-01-07T17:37:38.528971Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188133:2506] disconnected. 
2026-01-07T17:37:38.528995Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188133:2506] disconnected; active server actors: 1 2026-01-07T17:37:38.529007Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675220221188133:2506] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767807457 nanos: 508000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2026-01-07T17:37:38.533846Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-07T17:37:38.533939Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2026-01-07T17:37:38.534307Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7592675220221188137:2509]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlap::CreateStore >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2026-01-07T17:37:27.072087Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675172215463719:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:27.072147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:27.166537Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675172500288740:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:27.169452Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:27.172977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:27.175740Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:27.333901Z node 2 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:27.631439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:27.669085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:27.734794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:27.734924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:27.738364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:27.738461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:27.771349Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:37:27.771603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:27.773345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:27.878799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:27.880282Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:27.898100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:28.135981Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:28.242202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00303d/r3tmp/yandexPuBr6I.tmp 2026-01-07T17:37:28.242238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00303d/r3tmp/yandexPuBr6I.tmp 2026-01-07T17:37:28.242450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00303d/r3tmp/yandexPuBr6I.tmp 2026-01-07T17:37:28.243018Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:28.301233Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:28.376964Z INFO: TTestServer started on Port 12327 GrpcPort 32558 PQClient connected to localhost:32558 2026-01-07T17:37:28.597849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:28.735738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:31.549297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675189680158323:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:31.549297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675189680158315:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:31.549392Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:31.549662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675189680158330:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:31.549700Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:31.554546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:31.582008Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675189680158329:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:37:31.652148Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675189680158358:2183] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:31.938454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:31.940359Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675189395334371:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:31.940877Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MmU5YWVkNDItMmI1NGI1MDUtMWRkMGFjYTUtYmE3ZTkzZTk=, ActorId: [1:7592675189395334332:2331], ActorState: ExecuteState, LegacyTraceId: 01kecrj0en12a2t3e9zxzh16bq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:31.942101Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675189680158365:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:31.943740Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:31.944074Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZGJiMGRkOTAtZGM0ZDBjOWEtZTlmOWI3ZTgtNzIyZmM4MmE=, ActorId: [2:7592675189680158313:2306], ActorState: ExecuteState, LegacyTraceId: 01kecrj0cv3bkncajkkb8nwrhh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:31.944443Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: ... 
ats { min_last_write_time { seconds: 1767807459 nanos: 193000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2026-01-07T17:37:39.999114Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-07T17:37:39.999264Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2026-01-07T17:37:39.999974Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675223755074712:2522]: Request location 2026-01-07T17:37:40.000632Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675223755074714:2523] connected; active server actors: 1 2026-01-07T17:37:40.000965Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-07T17:37:40.000991Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-07T17:37:40.001002Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-07T17:37:40.001016Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-07T17:37:40.001028Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-07T17:37:40.001039Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-01-07T17:37:40.001057Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-07T17:37:40.001075Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-07T17:37:40.001086Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-01-07T17:37:40.001096Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-07T17:37:40.001106Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-07T17:37:40.001117Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to 
response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-01-07T17:37:40.001128Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-01-07T17:37:40.001141Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-07T17:37:40.001303Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675223755074712:2522]: Got location 2026-01-07T17:37:40.001150Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 Got response: 2026-01-07T17:37:40.002086Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675223755074714:2523] disconnected. 2026-01-07T17:37:40.002112Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675223755074714:2523] disconnected; active server actors: 1 2026-01-07T17:37:40.002126Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675223755074714:2523] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767807458956 tx_id: 281474976720670 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 
attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe topic with no stats or location 2026-01-07T17:37:40.005289Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-07T17:37:40.005410Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767807458956 tx_id: 281474976720670 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe bad topic 2026-01-07T17:37:40.009338Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-07T17:37:40.009461Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex >> TOlapNaming::AlterColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk >> TOlap::CreateTableTtl [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed >> TOlap::AlterTtl [GOOD] >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CustomDefaultPresets >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite 
>> TOlapNaming::AlterColumnTableOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:37.363051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:37.363137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.363198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:37.363225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:37.363252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:37.363273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:37.363330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.363395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:37.364103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:37.364364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:37.454808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:37.454867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:37.471359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:37.471619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:37.471777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:37.477631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:37.477895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:37.478383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:37:37.478603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:37.480663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.480866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:37.483870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:37.483931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.484042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:37.484088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:37.484173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:37.484348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:37.617146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.617816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.617917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.617975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.618790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000042 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2026-01-07T17:37:43.542467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:43.542887Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:43.543367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 38], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:37:43.543437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 0 2026-01-07T17:37:43.543496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 106:0 type: TxCreateColumnTable target path: [OwnerId: 72057594046678944, LocalPathId: 43] source path: 2026-01-07T17:37:43.543770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:37:43.544346Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:43.544419Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:747) 2026-01-07T17:37:43.544553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:37:43.544622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 43] was 2 2026-01-07T17:37:43.553529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 43, at schemeshard: 72057594046678944 2026-01-07T17:37:43.553872Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2026-01-07T17:37:43.554195Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:43.554245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:37:43.554535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:37:43.554671Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:43.554722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:208:2208], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-01-07T17:37:43.554774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:208:2208], at schemeshard: 72057594046678944, txId: 106, path id: 43 2026-01-07T17:37:43.555214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:43.555294Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:237: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2026-01-07T17:37:43.555497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:323: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2026-01-07T17:37:43.556381Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:43.556494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:43.556542Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:37:43.556587Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-07T17:37:43.556643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 
2026-01-07T17:37:43.564435Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:43.564584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:43.564621Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:37:43.564671Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 1 2026-01-07T17:37:43.564716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:37:43.564815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-07T17:37:43.566188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2026-01-07T17:37:43.566354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2026-01-07T17:37:43.570908Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;self_id=[3:717:2702];ev=NActors::IEventHandle;tablet_id=72075186233409546;tx_id=106;this=136796787000416;method=TTxController::StartProposeOnExecute;tx_info=106:TX_KIND_SCHEMA;min=5000043;max=18446744073709551615;plan=0;src=[3:131:2155];cookie=12:5;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:37:43.572108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:37:43.582110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:37.322873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:37.322993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.323038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:37.323080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:37.323115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:37.323142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:37.323195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.323252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:37.324110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:37.324402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:37.428554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:37.428618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:37.444912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:37.445263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:37.445465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:37.452083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:37.452403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:37.453121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:37.453402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-07T17:37:37.456201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.456426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:37.484207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:37.484279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.484409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:37.484492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:37.484599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:37.484796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:37.639937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.640824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.640980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.641896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hard::TEvProposeTransactionResult> complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.117631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.117698Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:149: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2026-01-07T17:37:44.117829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2026-01-07T17:37:44.118028Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000042 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:44.120069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2026-01-07T17:37:44.120231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000043 2026-01-07T17:37:44.120706Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:44.120847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 
12884904047 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:44.120922Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:110: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000043 2026-01-07T17:37:44.122004Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 128 -> 129 2026-01-07T17:37:44.122323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:37:44.122423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:37:44.123424Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=column_engine_logs.cpp:113;event=double_schema_version;v=1; FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000043 2026-01-07T17:37:44.148007Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:44.148079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:37:44.148414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:37:44.148686Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:44.148761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:208:2208], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-01-07T17:37:44.148830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:208:2208], at schemeshard: 72057594046678944, txId: 106, path id: 39 2026-01-07T17:37:44.149238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.149311Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:200: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:37:44.149409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:223: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-07T17:37:44.151454Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:44.151573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 
2026-01-07T17:37:44.151619Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:37:44.151677Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-07T17:37:44.151732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:37:44.153428Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:44.153525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:37:44.153565Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:37:44.153600Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 14 2026-01-07T17:37:44.153641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:37:44.153742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-07T17:37:44.155668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-07T17:37:44.158252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:37:44.159758Z node 3 :TX_TIERING ERROR: log.cpp:841: fline=manager.cpp:170;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2026-01-07T17:37:44.160010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:37:44.172440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-01-07T17:37:44.172517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-01-07T17:37:44.172657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2026-01-07T17:37:44.175973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at 
schemeshard: 72057594046678944 2026-01-07T17:37:44.176272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.176338Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-07T17:37:44.176483Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:37:44.176521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:37:44.176567Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:37:44.176605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:37:44.176649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-07T17:37:44.176718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:746:2722] message: TxId: 106 2026-01-07T17:37:44.176779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:37:44.176820Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-07T17:37:44.176873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-07T17:37:44.177037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:37:44.180565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:37:44.180638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:954:2921] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnTableFailed [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> TxUsage::WriteToTopic_Demo_42_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 
72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:35.058416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:35.058520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.058574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:35.058610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:35.058645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:35.058673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:35.058752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.058815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:35.059753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:35.060045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:35.162919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:35.162983Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:35.180443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:35.180820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:35.181039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:35.187478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:35.187808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:35.188478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:35.188756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:35.191406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:35.191617Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:35.192792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:35.192850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:35.192979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:35.193024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:35.193123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:35.193317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:35.342303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.343984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.344042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.344105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.344158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.344265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.344333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -07T17:37:44.151520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.151603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.151715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.158558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.158788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.158977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.159199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.159319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.159490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.159596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.159695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.163437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.163622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.163690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply 
complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.163922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.164022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.164164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.164242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.164351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.165948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.166047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.170494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.170697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.170803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.170889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.170982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.171074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.171173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, 
operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.171259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:44.175864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:37:44.175996Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:37:44.176034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:44.176096Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:37:44.176137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:44.176175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:37:44.176263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:3125:4345] message: TxId: 102 2026-01-07T17:37:44.176333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:44.176407Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:37:44.176462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:37:44.178145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-07T17:37:44.182603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got 
EvNotifyTxCompletionResult 2026-01-07T17:37:44.182675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:4060:5219] TestWaitNotification: OK eventTxId 102 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TOlap::CustomDefaultPresets [GOOD] >> TxUsage::WriteToTopic_Demo_13_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:35.944213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:35.944308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.944355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:35.944390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:35.944436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:35.944476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:35.944524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.944585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:35.945440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:35.945725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:36.027250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:36.027297Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:36.041741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:36.042085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:36.042293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:36.048448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:36.048755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:36.049418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:36.049702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:36.052252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:36.052451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:36.053597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:36.053650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:36.053760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:36.053805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:36.053895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:36.054077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:36.190852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.191722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.191829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:37:36.191894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.191974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.192688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 129 2026-01-07T17:37:45.209442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:37:45.209522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:37:45.210635Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 3 Name: "comment" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:37:45.210790Z node 2 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=5000040;tx_id=103;;switch_optimizer=0;switch_accessors=1; 2026-01-07T17:37:45.216539Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:45.216616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:45.216811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:37:45.217018Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:45.217083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-07T17:37:45.217133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-01-07T17:37:45.217592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.217655Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:37:45.217725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-07T17:37:45.217819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:37:45.218753Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, 
cookie: 103 2026-01-07T17:37:45.218877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:37:45.218921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:37:45.218964Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:37:45.219007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:37:45.220199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:37:45.220274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:37:45.220300Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:37:45.220334Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-07T17:37:45.220367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:37:45.222706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-07T17:37:45.222908Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:45.222944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:37:45.223701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:37:45.223801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:37:45.223841Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:45.223887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 
2026-01-07T17:37:45.224377Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:37:45.224457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:37:45.224491Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:37:45.224524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-07T17:37:45.224558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:37:45.224651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:37:45.226119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:37:45.238829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-07T17:37:45.238912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2026-01-07T17:37:45.239047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-07T17:37:45.239107Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:37:45.241321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.241494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.241543Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:37:45.241668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:37:45.241728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:37:45.241773Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:37:45.241811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:37:45.241852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready 
parts: 1/1, is published: true 2026-01-07T17:37:45.241934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:747:2723] message: TxId: 103 2026-01-07T17:37:45.241990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:37:45.242031Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:37:45.242074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:37:45.242205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:37:45.244159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:37:45.244225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:850:2818] TestWaitNotification: OK eventTxId 103 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TCmsTest::TestKeepAvailableMode >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_13_Query >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:35.970822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:35.970923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.970968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:35.971001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:35.971039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:35.971066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:35.971133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.971202Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:35.972120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:35.972398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:36.057926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:36.057986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:36.074545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:36.074986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:36.075209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:36.082575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:36.082937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:36.083724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:36.084026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:36.087171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:36.087388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:36.088708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:36.088774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:36.088912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:36.088987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:36.089103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:36.089329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:36.258469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 
281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.259559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.259708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.259781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.259872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.259944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:36.260649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:37:45.463829Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:45.463934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:45.463973Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:45.464180Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:45.464224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:45.464365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:37:45.464419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:45.466202Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:45.466248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:45.466418Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:45.466455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:37:45.466512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.466549Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:37:45.466658Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:37:45.466688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:37:45.466723Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:37:45.466752Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:37:45.466786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:37:45.466819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:37:45.466852Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:37:45.466883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:37:45.466941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:37:45.466975Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:37:45.467005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:37:45.467815Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:37:45.467931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:37:45.467995Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:37:45.468054Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:37:45.468108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:37:45.468207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:37:45.471183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:37:45.471703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:37:45.472590Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:682:2671] Bootstrap 2026-01-07T17:37:45.473835Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:682:2671] Become StateWork (SchemeCache [2:687:2676]) 2026-01-07T17:37:45.477263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { 
Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:45.477672Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.477980Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-07T17:37:45.479822Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:682:2671] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:37:45.485558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:45.485840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-07T17:37:45.486377Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:37:45.486610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:37:45.486660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:37:45.487048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:37:45.487165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:37:45.487209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:697:2686] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:37:45.490461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:45.490737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:45.490949Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2026-01-07T17:37:45.492865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: 
StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:45.493068Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:37:45.493414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:37:45.493461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:37:45.493818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:37:45.493898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:37:45.493924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:704:2693] TestWaitNotification: OK eventTxId 102 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> Cdc::InitialScan [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:40.895973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:40.896073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:40.896118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:40.896154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:40.896205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:40.896244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:40.896304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:40.896378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:40.897354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:40.897681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:40.984159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:40.984214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:40.996200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:40.996528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:40.996763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:41.004498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:41.004875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:41.005702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:41.005990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:41.009267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:41.009517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:41.010757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:41.010829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:41.010984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:41.011040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:41.011197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:41.011417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:41.138811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.139948Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.140874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.141018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:41.141117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
MESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-07T17:37:46.254152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.254222Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:37:46.254311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-07T17:37:46.255638Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:37:46.255755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:37:46.255804Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:37:46.255856Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:37:46.255903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:37:46.257002Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:37:46.257094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:37:46.257126Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:37:46.257158Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:37:46.257192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:37:46.257270Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:37:46.258901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-07T17:37:46.258992Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-07T17:37:46.259077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-07T17:37:46.259537Z node 3 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-07T17:37:46.259621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.259742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-07T17:37:46.260184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:37:46.261336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:37:46.262249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.274624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-07T17:37:46.274700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:37:46.274846Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:37:46.278387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.278554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.278599Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:37:46.278754Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:37:46.278802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:46.278848Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:37:46.278906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:46.278954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:37:46.279035Z node 
3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:744:2720] message: TxId: 102 2026-01-07T17:37:46.279098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:37:46.279148Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:37:46.279190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:37:46.279357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:37:46.281391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:37:46.281456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:806:2774] TestWaitNotification: OK eventTxId 102 2026-01-07T17:37:46.282019Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:37:46.282288Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 300us result status StatusSuccess 2026-01-07T17:37:46.282807Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:37.284741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:37.284866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.284913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:37.284973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:37.285012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:37.285054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:37.285121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.285181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:37.286193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:37.286513Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:37.378021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:37.378083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:37.395078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:37.395454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:37.395672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:37.403075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:37.403469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:37.404147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:37.404448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:37.407478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.407679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:37.409014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:37.409077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.409214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:37.409267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:37.409375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:37.409659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:37.560521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.561686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.561841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.561915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:37.562808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
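The run of ESchemeOpCreateSysView transactions above is the schemeshard roster populating /MyRoot/.sys with one entry per system view (auth_permissions, nodes, ds_pdisks, the top_queries_* and top_partitions_* views, and so on). A minimal sketch, assuming only that a plain-text excerpt of this log is available as a Python string, of pulling the created view names and types back out of such output; the regex is tailored to the exact text layout printed here and is not part of the test suite:

import re

# Assumption: "log_text" holds a plain-text excerpt like the log above.
# Extracts (name, type) pairs from the CreateSysView requests the
# schemeshard sends while building /MyRoot/.sys.
SYS_VIEW_RE = re.compile(r'CreateSysView \{ Name: "([^"]+)"\s+Type:\s+(\w+)')

def created_sys_views(log_text):
    # Pairs are returned in the order the transactions appear in the log.
    return SYS_VIEW_RE.findall(log_text)

if __name__ == "__main__":
    sample = 'CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668'
    print(created_sys_views(sample))  # [('nodes', 'ENodes')]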
Count reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:37:46.595888Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 2026-01-07T17:37:46.597227Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:46.597276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:46.597493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:37:46.597661Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:46.597712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:37:46.597760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:37:46.597982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.598037Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:37:46.598104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-07T17:37:46.599214Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:37:46.599333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:37:46.599380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:37:46.599425Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:37:46.599469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:37:46.600648Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:37:46.600729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:37:46.600761Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:37:46.600789Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:37:46.600823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:37:46.600896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-07T17:37:46.604676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-07T17:37:46.607575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:37:46.607960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:37:46.622754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-07T17:37:46.622823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:37:46.622958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:37:46.624630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.624789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:37:46.624833Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:37:46.624977Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:37:46.625017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:46.625056Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:37:46.625090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:46.625124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:37:46.625202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:753:2729] message: TxId: 101 2026-01-07T17:37:46.625260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:37:46.625299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:37:46.625329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:37:46.625476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:37:46.627036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:37:46.627084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:754:2730] TestWaitNotification: OK eventTxId 101 2026-01-07T17:37:46.627544Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:37:46.627798Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 279us result status StatusSuccess 2026-01-07T17:37:46.628430Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous >> TTopicApiDescribes::DescribeConsumer [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation >> TIcNodeCache::GetNodesInfoTest [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> Describe::LocationWithKillTablets [GOOD] >> Describe::DescribePartitionPermissions >> TSchemeShardSysNames::CreateOpsAreCovered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2026-01-07T17:37:34.967373Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675204530636618:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:34.979637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:35.039851Z node 
1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:35.044363Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675207211316160:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:35.046962Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:35.054789Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:35.276568Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:35.288665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:35.342212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:35.342305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:35.344342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:35.344450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:35.388006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:35.399964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:35.426411Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:37:35.429160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:35.479364Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:35.614021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:35.693852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003037/r3tmp/yandexqanooi.tmp 2026-01-07T17:37:35.693878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003037/r3tmp/yandexqanooi.tmp 2026-01-07T17:37:35.717147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003037/r3tmp/yandexqanooi.tmp 2026-01-07T17:37:35.720657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:35.746724Z INFO: TTestServer 
started on Port 28053 GrpcPort 22302 PQClient connected to localhost:22302 2026-01-07T17:37:35.993800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:36.007791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:36.050555Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:36.119701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:38.749151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675221710507148:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:38.749311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675221710507131:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:38.749478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:38.753049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675221710507152:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:38.753157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:38.754638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:38.790313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675221710507151:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:38.993186Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675221710507241:3049] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:39.063828Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675220096218447:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:39.065516Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YzcyODY3NzUtYTI1NjcxZjMtYzdmMDdmZi1lNmFiMTI0Ng==, ActorId: [2:7592675220096218422:2305], ActorState: ExecuteState, LegacyTraceId: 01kecrj7gg488p215tpb5wzgwd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:39.067825Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:39.075874Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675221710507258:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:39.081156Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjllMjcxMmEtY2Q0Nzg2MzMtYTBjMTMxNDMtOTkwMWE0MDc=, ActorId: [1:7592675221710507119:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrj7dh9s4mwphadcpwk65x, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:39.081717Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:39.082959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but p ... 
nanos: 594000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767807466 nanos: 613000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1767807466 nanos: 624000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767807466 nanos: 623000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1767807466 nanos: 632000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2026-01-07T17:37:47.467361Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-07T17:37:47.467489Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2026-01-07T17:37:47.469067Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7592675260365215057:2527]: Request location 2026-01-07T17:37:47.470001Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675260365215059:2528] connected; active server actors: 1 2026-01-07T17:37:47.475210Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-07T17:37:47.475239Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-07T17:37:47.475250Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-07T17:37:47.475261Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-07T17:37:47.475271Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-07T17:37:47.475281Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-01-07T17:37:47.475290Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-07T17:37:47.475304Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-07T17:37:47.475313Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: 
[72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 1, Generation 2 2026-01-07T17:37:47.475324Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-07T17:37:47.475332Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-07T17:37:47.475342Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 2, Generation 2 2026-01-07T17:37:47.475351Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 1, Generation 2 2026-01-07T17:37:47.475359Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-07T17:37:47.475368Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 2, Generation 2 2026-01-07T17:37:47.476330Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7592675260365215057:2527]: Got location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767807466362 tx_id: 281474976715672 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 2 generation: 2 } } } } } 
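The "Got response" dump above is the text-format Ydb.Topic.DescribeConsumerResult for rt3.dc1--topic-x/my-consumer: each partitions entry carries a partition_location with the node_id and generation reported by the read balancer. A minimal sketch, assuming the dump is available as a Python string in exactly this text layout (an assumption about formatting, not an API of the test), of summarizing how many partitions landed on each node:

import re
from collections import Counter

# Assumption: "dump" holds the text-format DescribeConsumerResult shown above.
# Counts partitions per node from the partition_location entries.
LOCATION_RE = re.compile(
    r'partitions \{ (?:partition_id: (\d+) )?active: true '
    r'partition_location \{ node_id: (\d+) generation: (\d+) \} \}'
)

def partitions_per_node(dump):
    counts = Counter()
    for _pid, node_id, _gen in LOCATION_RE.findall(dump):
        counts[int(node_id)] += 1
    return counts

if __name__ == "__main__":
    sample = ("partitions { active: true partition_location { node_id: 2 generation: 2 } } "
              "partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } }")
    print(partitions_per_node(sample))  # Counter({2: 1, 1: 1})

Note that the second describe call in the log (without include_location) prints partitions with no partition_location blocks, so the pattern intentionally matches nothing there.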
2026-01-07T17:37:47.477461Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675260365215059:2528] disconnected. 2026-01-07T17:37:47.477492Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675260365215059:2528] disconnected; active server actors: 1 2026-01-07T17:37:47.477507Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7592675260365215059:2528] disconnected no session 2026-01-07T17:37:47.481483Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-07T17:37:47.481579Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767807466362 tx_id: 281474976715672 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2026-01-07T17:37:47.493607Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-07T17:37:47.493718Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2026-01-07T17:37:33.866113Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675196872842553:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:33.866431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:33.925489Z node 2 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675197925498187:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:33.925541Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:33.938876Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:33.976227Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:34.257046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:34.257429Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:34.297768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:34.297882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:34.301300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:34.301369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:34.341716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:34.342826Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:37:34.343804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:34.451426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:34.518166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:34.581031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:34.626906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00303a/r3tmp/yandexAhkHZi.tmp 2026-01-07T17:37:34.626949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00303a/r3tmp/yandexAhkHZi.tmp 2026-01-07T17:37:34.681786Z INFO: TTestServer started on Port 23422 GrpcPort 13719 2026-01-07T17:37:34.735762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00303a/r3tmp/yandexAhkHZi.tmp 2026-01-07T17:37:34.736196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration PQClient connected to localhost:13719 2026-01-07T17:37:34.872476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:34.875509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:34.951799Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:35.010274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:37.695898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675214052713014:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:37.696046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675214052713022:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:37.696144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:37.699826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675214052713030:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:37.700052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:37.700178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:37.747775Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675214052713028:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2026-01-07T17:37:37.832331Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675214052713116:3029] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:38.071688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:38.076089Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675214052713133:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:38.077388Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzY1NDg5NDMtNzRiYTA2MTEtOWVlNGJjODMtMzQ2NTMyNTY=, ActorId: [1:7592675214052713012:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrj6cy52qmrn62mfpb5k3c, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:38.079812Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:38.077755Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675215105367785:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:38.078173Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjJkMzRjMGUtZmNmNzE3ZjYtN2VlYWNlNi02ODgyMTAzYw==, ActorId: [2:7592675215105367760:2305], ActorState: ExecuteState, LegacyTraceId: 01kecrj6gr4416eyvygsys8y0s, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:38.079821Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:38.135787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:38.303967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7592675218347680870:3362] 2026-01-07T17:37:38.856728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675196872842553:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:38.856810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:38.930385Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592675197925498187:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:38.930476Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects |87.0%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::CreateTime >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: 2026-01-07T17:36:25.932345Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:36:26.029971Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:26.030072Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:26.030140Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:26.030205Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] 
recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:36:26.056055Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:26.077915Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:36:26.079306Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:36:26.081984Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:36:26.084162Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:36:26.086104Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:36:26.095251Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:26.095752Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|83053096-a11b4991-fd90281d-e7e86c3a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:26.101870Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|50a5f19e-96e23a7c-7283337f-624febca_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:26.121352Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:26.121694Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b1ba2c2f-a508a7c5-36a78600-2723ddc0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:26.130275Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:26.130680Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e150e313-daa59766-926ce307-f72d84a4_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:26.138219Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:26.138614Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e2352b44-d9f5148c-84fd7100-7d203fa4_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner 
default 2026-01-07T17:36:26.146075Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:36:26.146369Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|88f7a044-928c34a4-ad32af48-3efb30d5_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:36:26.586387Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-07T17:36:26.640608Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:26.640679Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:26.640735Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:26.640793Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:181:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:186:2057] recipient: [2:185:2195] Leader for TabletID 72057594037927937 is [2:187:2196] sender: [2:188:2057] recipient: [2:185:2195] 2026-01-07T17:36:26.718361Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:36:26.718446Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:36:26.718493Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:26.718577Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! 
new actor is[2:187:2196] 2026-01-07T17:36:26.741242Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.812491Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.845031Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.855792Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.903922Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.963016Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:26.996208Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:27.135299Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:27.152152Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:27.473281Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:27.494679Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:27.882428Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:28.118396Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:36:28.244788Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:187:2196] sender: [2:268:2057] recipient: [2:14:2061] 2026-01-07T17:36:28.389165Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:36:28.390219Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions 
{ PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-07T17:36:28.391324Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:274:2196] 2026-01-07T17:36:28.401442Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' parti ... INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:37:49.891568Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:37:49.965988Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:37:50.137745Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:37:50.376167Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:37:50.492793Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [47:289:2277] sender: [47:390:2057] recipient: [47:14:2061] 2026-01-07T17:37:51.014334Z node 48 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 48 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:108:2057] recipient: [48:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:108:2057] recipient: [48:106:2138] Leader for TabletID 72057594037927937 is [48:112:2142] sender: [48:113:2057] recipient: [48:106:2138] 2026-01-07T17:37:51.089876Z node 48 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:37:51.089942Z node 48 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:37:51.089991Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:51.090048Z node 48 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:154:2057] recipient: [48:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:154:2057] recipient: [48:152:2172] Leader for TabletID 72057594037927938 is [48:158:2176] sender: [48:159:2057] recipient: [48:152:2172] Leader for TabletID 72057594037927937 is [48:112:2142] sender: [48:182:2057] recipient: [48:14:2061] 2026-01-07T17:37:51.115529Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:51.116437Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 48 actor [48:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 
} PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 } 2026-01-07T17:37:51.117483Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [48:188:2142] 2026-01-07T17:37:51.120161Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:188:2142] 2026-01-07T17:37:51.122074Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [48:189:2142] 2026-01-07T17:37:51.123476Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:189:2142] 2026-01-07T17:37:51.135954Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.136510Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c35c11ef-1f6d543a-2889f46b-dcd4f044_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.143844Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e8c263d8-a3600add-650ca8b2-b0053886_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.186583Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.187097Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e289f332-4f861df5-6f2d5637-259d322d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.197861Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.198384Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7d37aacc-6e1680cb-8533ceaa-f89961c6_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.216115Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.216629Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fc5fb125-bdaf3d9e-1f99f258-dc354380_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.228156Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.228675Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|75382f3e-a309838f-d60b5b63-26015892_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.694490Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] 
sender: [49:108:2057] recipient: [49:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:108:2057] recipient: [49:106:2138] Leader for TabletID 72057594037927937 is [49:112:2142] sender: [49:113:2057] recipient: [49:106:2138] 2026-01-07T17:37:51.761307Z node 49 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:37:51.761386Z node 49 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:37:51.761435Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:51.761494Z node 49 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:154:2057] recipient: [49:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:154:2057] recipient: [49:152:2172] Leader for TabletID 72057594037927938 is [49:158:2176] sender: [49:159:2057] recipient: [49:152:2172] Leader for TabletID 72057594037927937 is [49:112:2142] sender: [49:184:2057] recipient: [49:14:2061] 2026-01-07T17:37:51.782560Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:51.783401Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 49 actor [49:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 } 2026-01-07T17:37:51.784447Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [49:190:2142] 2026-01-07T17:37:51.787397Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:190:2142] 2026-01-07T17:37:51.789403Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [49:191:2142] 2026-01-07T17:37:51.791566Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:191:2142] 2026-01-07T17:37:51.801135Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.801669Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5c2ba0ca-f70f09f9-c9761e4f-1e098513_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.807779Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bf2c4eb3-f014e6a-a8cf58fb-691a05cc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.838939Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.839519Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a970630b-e51dc246-fd087358-bf569fa1_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.855551Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.856102Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5d9f6b79-ce87b56c-bad4e427-259ffc3a_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.871072Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.871647Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3c80c10b-775a82e8-3b93f1a7-611c7847_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:37:51.881544Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:37:51.881992Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dd00b20e-9a48ca78-fbeb350a-31a0bb5f_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |87.0%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin >> TCmsTest::VDisksEviction [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2026-01-07T17:37:51.810333Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:37:51.810451Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-07T17:37:51.810591Z node 26 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-07T17:37:51.812388Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { 
Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-07T17:37:51.813123Z node 26 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 
120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-07T17:37:51.813427Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-07T17:37:51.813506Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-07T17:37:51.813584Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-07T17:37:51.813754Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:37:51.813982Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:37:51.814031Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-07T17:37:51.814302Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-07T17:37:51.814353Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:37:51.814451Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-07T17:37:51.814502Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-07T17:37:51.814533Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326: ... 
torage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 
120622560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120622560 } 2026-01-07T17:37:52.217775Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-07T17:37:52.217850Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-07T17:37:52.217889Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-07T17:37:52.218014Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:37:52.218182Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:37:52.218220Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-07T17:37:52.218447Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.100000s 2026-01-07T17:37:52.218502Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:37:52.218606Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-07T17:37:52.218666Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-07T17:37:52.218692Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2026-01-07T17:37:52.218709Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2026-01-07T17:37:52.218734Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2026-01-07T17:37:52.218765Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 
2026-01-07T17:37:52.218782Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2026-01-07T17:37:52.218799Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 33, wbId# [33:8388350642965737326:1634689637] 2026-01-07T17:37:52.218971Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 120524072 ChangeTime: 120524072 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219532Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 120524072 ChangeTime: 120524072 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219645Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 120524072 ChangeTime: 120524072 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219697Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 120524072 ChangeTime: 120524072 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219740Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 120524072 ChangeTime: 120524072 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219780Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 33 CreateTime: 120524072 ChangeTime: 120524072 Path: "/33/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219816Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 120524072 ChangeTime: 120524072 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219861Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 120524072 ChangeTime: 120524072 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-07T17:37:52.219903Z node 26 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:37:52.232504Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-07T17:37:52.232775Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest 
{ User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 26 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2026-01-07T17:37:52.233378Z node 26 :CMS INFO: cms.cpp:1441: User user removes request user-r-3 2026-01-07T17:37:52.233432Z node 26 :CMS DEBUG: cms.cpp:1464: Resulting status: OK 2026-01-07T17:37:52.233522Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2026-01-07T17:37:52.233589Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 26 2026-01-07T17:37:52.233744Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2026-01-07T17:37:52.246066Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2026-01-07T17:37:52.246273Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::DisableMaintenance >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TxUsage::WriteToTopic_Demo_11_Table [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] 
recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:43.531104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:43.531174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:43.531229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:43.531272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:43.531301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:43.531321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:43.531357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:43.531414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:43.532211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:43.532522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:43.626987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:43.627070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:43.643692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:43.644808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:43.645064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:43.654449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:43.654825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:43.655687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:43.655993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:43.659079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-01-07T17:37:43.659305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:43.660660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:43.660723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:43.660871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:43.660942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:43.661051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:43.661249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:43.816414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.817572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.817720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.817796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.817895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:37:43.817988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:43.818770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2026-01-07T17:37:55.595072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:37:55.595099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:37:55.596067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:37:55.596109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:37:55.596275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:37:55.596303Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:37:55.596379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-07T17:37:55.596396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-07T17:37:55.596451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:21 2026-01-07T17:37:55.596466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2026-01-07T17:37:55.596510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2026-01-07T17:37:55.596533Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2026-01-07T17:37:55.596572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:17 2026-01-07T17:37:55.596588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 
72075186233409562 2026-01-07T17:37:55.597382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-07T17:37:55.597428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2026-01-07T17:37:55.597508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:13 2026-01-07T17:37:55.597532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2026-01-07T17:37:55.597609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2026-01-07T17:37:55.597640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2026-01-07T17:37:55.597719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:9 2026-01-07T17:37:55.597746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2026-01-07T17:37:55.597814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2026-01-07T17:37:55.597841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2026-01-07T17:37:55.597932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:38 2026-01-07T17:37:55.597964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2026-01-07T17:37:55.598066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:40 2026-01-07T17:37:55.598097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2026-01-07T17:37:55.601855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2026-01-07T17:37:55.601926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2026-01-07T17:37:55.602049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:34 2026-01-07T17:37:55.602073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2026-01-07T17:37:55.602145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2026-01-07T17:37:55.602172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2026-01-07T17:37:55.602228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:30 2026-01-07T17:37:55.602253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to 
deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2026-01-07T17:37:55.602311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:26 2026-01-07T17:37:55.602333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2026-01-07T17:37:55.602390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-07T17:37:55.602411Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-01-07T17:37:55.602471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:57 2026-01-07T17:37:55.602493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2026-01-07T17:37:55.602549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:53 2026-01-07T17:37:55.602574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2026-01-07T17:37:55.606459Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:55 2026-01-07T17:37:55.606525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2026-01-07T17:37:55.606624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:49 2026-01-07T17:37:55.606648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2026-01-07T17:37:55.606709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:47 2026-01-07T17:37:55.606732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2026-01-07T17:37:55.606787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:51 2026-01-07T17:37:55.606809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2026-01-07T17:37:55.606869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:45 2026-01-07T17:37:55.606891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2026-01-07T17:37:55.606945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:43 2026-01-07T17:37:55.606966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2026-01-07T17:37:55.607018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:41 2026-01-07T17:37:55.607064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2026-01-07T17:37:55.607264Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2026-01-07T17:37:55.608385Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:37:55.608624Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 282us result status StatusPathDoesNotExist 2026-01-07T17:37:55.608791Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:37:55.609557Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 42 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-07T17:37:55.609660Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 42 took 115us result status StatusPathDoesNotExist 2026-01-07T17:37:55.609744Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TxUsage::WriteToTopic_Demo_11_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2026-01-07T17:36:25.927665Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:25.964499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:25.964736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:25.972673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:25.973013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:25.973295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:25.973448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:25.973562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:25.973672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:25.973820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:25.973963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:25.974107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:25.974262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:25.974406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:25.974526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:25.974680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:26.011774Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:26.013095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:26.013183Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:26.013412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:26.013630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:26.013708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:26.013754Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:26.013877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:26.013972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:26.014023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:26.014058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:26.014268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:26.014356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:26.014412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:26.014450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:26.014550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:26.014645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:26.014710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:26.014748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:26.014822Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:26.014878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:26.014922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:26.014969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:26.015011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:26.015040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:26.015290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:26.015411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:26.015449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:26.015601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:26.015650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:26.015684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:26.015738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:26.015791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:26.015846Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:26.015906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:26.015965Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:26.016001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:26.016147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:26.016214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=6831; 2026-01-07T17:37:53.239798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-07T17:37:53.240646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=798; 2026-01-07T17:37:53.240710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7956; 2026-01-07T17:37:53.240760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8091; 2026-01-07T17:37:53.240827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-07T17:37:53.240948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=56; 2026-01-07T17:37:53.241004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8760; 2026-01-07T17:37:53.241194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=124; 2026-01-07T17:37:53.241387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=139; 2026-01-07T17:37:53.241618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=166; 
2026-01-07T17:37:53.241837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=166; 2026-01-07T17:37:53.243887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1987; 2026-01-07T17:37:53.246000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2048; 2026-01-07T17:37:53.246067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-07T17:37:53.246109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-07T17:37:53.246145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:37:53.246211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2026-01-07T17:37:53.246247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-07T17:37:53.246332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-01-07T17:37:53.246371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:37:53.246435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-07T17:37:53.246522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 2026-01-07T17:37:53.246602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-07T17:37:53.246638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22806; 2026-01-07T17:37:53.246782Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:53.246901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:53.246957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:53.247028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:53.247085Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:53.247207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:53.247265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:53.247297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:53.247357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:53.247419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:53.247463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.247497Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.247597Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:53.247793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.090000s; 2026-01-07T17:37:53.253757Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:53.254200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:53.254273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:53.254377Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:53.254432Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:53.254511Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:53.254601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:53.254673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.254729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.254837Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:53.254926Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:53.255756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.001000s; 2026-01-07T17:37:53.255827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser |87.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2026-01-07T17:36:30.186664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:30.220766Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:30.221049Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:30.228646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:30.228911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:30.229162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:30.229335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:30.229499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:30.229612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:30.229755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:30.229868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:30.230011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:30.230147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.230280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:30.230393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:30.230525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:30.261934Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:30.265520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:30.265612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:30.265832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.266075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:30.266174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:30.266222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:30.266342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:30.266414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:30.266459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:30.266490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:30.266670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:30.266753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:30.266799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:30.266830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:30.266925Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:30.266982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:30.267051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:30.267087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:30.267145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:30.267193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:30.267235Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:30.267284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:30.267319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:30.267346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:30.267558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:30.267664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:30.267699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:30.267867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:30.267912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.267946Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:30.267993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:30.268035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:30.268073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:30.268133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:30.268179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:30.268220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:30.268352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:30.268423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... nes;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9299; 2026-01-07T17:37:53.417708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-07T17:37:53.418695Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=906; 2026-01-07T17:37:53.418767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10631; 2026-01-07T17:37:53.418817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10796; 2026-01-07T17:37:53.418889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:37:53.418996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=60; 2026-01-07T17:37:53.419068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11698; 
2026-01-07T17:37:53.419277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=129; 2026-01-07T17:37:53.419435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=100; 2026-01-07T17:37:53.419639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=155; 2026-01-07T17:37:53.419831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=142; 2026-01-07T17:37:53.423320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3412; 2026-01-07T17:37:53.426965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3540; 2026-01-07T17:37:53.427075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-07T17:37:53.427135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:37:53.427205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2026-01-07T17:37:53.427294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-07T17:37:53.427342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:37:53.427444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2026-01-07T17:37:53.427490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-07T17:37:53.427562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-07T17:37:53.427657Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=58; 2026-01-07T17:37:53.427748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=52; 2026-01-07T17:37:53.427796Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31931; 2026-01-07T17:37:53.427966Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:53.428093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:37:53.428174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:37:53.428256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:37:53.428310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:37:53.428465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:53.428538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:53.428584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:53.428637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:53.428711Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:53.428766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.428839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.428987Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:53.429223Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.031000s; 2026-01-07T17:37:53.431469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:53.432483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:37:53.432556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:53.432644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:37:53.432693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:37:53.432753Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:53.432838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:53.432901Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.432997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:53.433108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:37:53.433170Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:53.433913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.004000s; 2026-01-07T17:37:53.433982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::WriteToTopic_Demo_13_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_14_Table >> TMaintenanceApiTest::DisableMaintenance [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [GOOD] >> 
TCmsTest::ActionWithZeroDuration >> TCmsTest::PermissionDuration >> TCmsTest::WalleTasks |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::DisableMaintenance [GOOD] |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimit >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TOlap::StoreStats [GOOD] >> TOlap::Decimal >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous >> Describe::DescribePartitionPermissions [GOOD] >> DirectReadWithServer::KillPQTablet >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [GOOD] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |87.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin >> TOlap::Decimal [GOOD] >> TOlap::MoveTableStats >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 2217, MsgBus: 23356 2026-01-07T17:34:18.037201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674358451490071:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:18.037442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:18.085747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:18.572433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:18.646348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:18.646451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:18.747617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:18.776312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674358451489970:2081] 1767807257949010 != 1767807257949013 2026-01-07T17:34:18.788438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:18.794589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:19.023601Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:19.105623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:19.105645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:19.105652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:19.105750Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:19.723943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:19.738201Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:34:19.803212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.101694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.277308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.376197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:22.997037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674358451490071:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:22.997102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:23.702748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674384221295622:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.702864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.703173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674384221295632:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:23.703219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.021708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.056091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.085784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.114270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.153175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.197081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.230515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.287635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.362172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674388516263801:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.362264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.362547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674388516263807:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.362599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674388516263806:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.362695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.366934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:24.385334Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674388516263810:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:34:24.475686Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674388516263861:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... iptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:00.267179Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:00.268658Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:00.284380Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592674800661572766:2081] 1767807360135628 != 1767807360135631 2026-01-07T17:36:00.304868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:00.305118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:00.323846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:00.396254Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:36:00.396282Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:36:00.396291Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:36:00.396372Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:36:00.526145Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:00.874833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:00.884566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:00.979137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:01.144456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:01.144634Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:01.254279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:03.800561Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674813546476523:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.800641Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.801020Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674813546476533:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.801109Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.874345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:03.902543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:03.928057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:03.956036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:03.983551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:04.014215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:04.041936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:04.084523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:04.146577Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674817841444701:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:04.146665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:04.146941Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674817841444705:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:04.146949Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674817841444707:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:04.147003Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:04.151872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:04.164601Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592674817841444710:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:36:04.247454Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592674817841444761:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:05.137166Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592674800661572793:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:05.137225Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:36:05.697496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:15.244076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:36:15.244121Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TxUsage::WriteToTopic_Demo_43_Table >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query >> TCmsTest::ManualRequestApprovalLockingAllNodes >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::PriorityLocks >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser >> TOlap::MoveTableStats [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> TCmsTest::ManualRequestApprovalLockingAllNodes [GOOD] >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::MoveTableStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:37.857668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:37.857765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.857825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:37.857864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:37.857902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:37.857943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:37.858005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:37.858096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:37.859071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:37.859417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:37.956798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:37.956862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:37.973049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:37.973459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:37.973664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:37.980796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:37.981165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:37.981933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:37.982186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:37.985041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.985239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:37.986399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:37.986460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:37.986578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:37.986629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:37.986725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-07T17:37:37.986915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:38.130248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.131973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.132055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:38.132098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... CHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#203:0 progress is 1/1 2026-01-07T17:38:06.827689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-07T17:38:06.827738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 203, ready parts: 1/1, is published: true 2026-01-07T17:38:06.827843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:867:2825] message: TxId: 203 2026-01-07T17:38:06.827902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-07T17:38:06.827947Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 203:0 2026-01-07T17:38:06.827985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 203:0 2026-01-07T17:38:06.828140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:38:06.828184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:38:06.829355Z node 3 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186233409546 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 203 2026-01-07T17:38:06.829588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:38:06.829662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:38:06.829765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:38:06.832400Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:38:06.832508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 203: got EvNotifyTxCompletionResult 2026-01-07T17:38:06.832569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 203: satisfy waiter [3:1045:2992] TestWaitNotification: OK eventTxId 203 2026-01-07T17:38:06.833477Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:38:06.833704Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 254us result status 
StatusPathDoesNotExist 2026-01-07T17:38:06.833920Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:38:06.834706Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 41 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-07T17:38:06.834962Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 41 took 262us result status StatusSuccess 2026-01-07T17:38:06.835557Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 220 LastUpdateTime: 220 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:38:06.901570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 1225216 rowCount 100000 cpuUsage 0 2026-01-07T17:38:06.901762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:279: PersistSingleStats for pathId [OwnerId: 72057594046678944, LocalPathId: 39], tabletId 72075186233409546, followerId 0: unknown pathId 2026-01-07T17:38:06.915371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:38:07.187921Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MovedColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:38:07.188286Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MovedColumnTable" took 429us result status StatusSuccess 2026-01-07T17:38:07.188850Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 220 LastUpdateTime: 220 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 
1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser >> TxUsage::WriteToTopic_Demo_14_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> TCmsTest::PriorityLocks [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin >> TxUsage::WriteToTopic_Demo_14_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2026-01-07T17:36:39.572180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:39.600893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:39.601142Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:39.608387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:39.608634Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:39.608863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:39.609023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:39.609128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:39.609473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:39.609615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:39.609730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:39.609857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:39.609986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:39.610127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:39.610234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:39.610344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:39.634044Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:39.634884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:39.634955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:39.635159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:39.635357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:39.635443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:39.635493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:39.635621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:39.635694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:39.635741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:39.635774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:39.635985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:39.636066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:39.636114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:39.636147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:39.636255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:39.636317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:39.636386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:39.636417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:39.636464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:39.636529Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:39.636575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:39.636623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:39.636660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:39.636689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:39.636900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:39.637020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:39.637051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:39.637182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:39.637222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:39.637250Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:39.637297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:39.637337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:39.637371Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:39.637429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:39.637470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:39.637503Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:39.637639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:39.637691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... olumn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7012; 2026-01-07T17:38:05.081900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-07T17:38:05.082600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=658; 2026-01-07T17:38:05.082642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8092; 2026-01-07T17:38:05.082678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8235; 2026-01-07T17:38:05.082724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-07T17:38:05.082811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2026-01-07T17:38:05.082849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8823; 2026-01-07T17:38:05.082984Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=90; 2026-01-07T17:38:05.083112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2026-01-07T17:38:05.083242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=94; 2026-01-07T17:38:05.083363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2026-01-07T17:38:05.084738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1337; 2026-01-07T17:38:05.086207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1412; 2026-01-07T17:38:05.086269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2026-01-07T17:38:05.086306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-07T17:38:05.086335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-07T17:38:05.086392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2026-01-07T17:38:05.086433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=13; 2026-01-07T17:38:05.086524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2026-01-07T17:38:05.086567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:38:05.086641Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2026-01-07T17:38:05.086726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2026-01-07T17:38:05.086838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=70; 2026-01-07T17:38:05.086891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20863; 2026-01-07T17:38:05.087060Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:38:05.087180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:38:05.087278Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:38:05.087353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:38:05.087405Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:38:05.087556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:05.087653Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:05.087706Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:05.087761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:05.087877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:05.087944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:05.087992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:05.088109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:05.088343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2026-01-07T17:38:05.091043Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:38:05.092280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:38:05.092383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:05.092492Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:05.092549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:05.092614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:05.092702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:05.092766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:05.092825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:05.092970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:38:05.093042Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:05.093907Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.121000s; 2026-01-07T17:38:05.093986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::BridgeModeCollectInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityLocks [GOOD] Test command err: 2026-01-07T17:38:03.057767Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-07T17:38:03.057858Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-07T17:38:03.057886Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-07T17:38:03.057917Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-07T17:38:03.057944Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-07T17:38:03.057971Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-07T17:38:03.057994Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-07T17:38:03.058019Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-07T17:38:03.065714Z node 9 :CMS ERROR: cluster_info.cpp:490: 
Cannot update state for unknown PDisk 9:9 2026-01-07T17:38:03.065795Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-07T17:38:03.065822Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-07T17:38:03.065845Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-07T17:38:03.065920Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-07T17:38:03.065956Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-07T17:38:03.065980Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-07T17:38:03.066003Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-07T17:38:03.103909Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-07T17:38:03.103983Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-07T17:38:03.104006Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-07T17:38:03.104047Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-07T17:38:03.104076Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-07T17:38:03.104098Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-07T17:38:03.104118Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-07T17:38:03.104140Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-07T17:38:07.214057Z node 28 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005531s |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved [GOOD] >> TCmsTest::Notifications >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TCmsTest::RequestReplaceDevicePDisk >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin >> TCmsTest::StateRequestUnknownNode >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2026-01-07T17:36:18.140203Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:18.170569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:18.170814Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:18.177794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:18.178093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:18.178350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:18.178516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:18.178635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:18.178753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:18.178884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:18.179013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:18.179160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:18.179300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:18.179399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:18.179554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:18.179664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:18.206696Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:18.207280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:18.207334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:18.207520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:18.207658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:18.207716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:18.207750Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:18.207825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:18.207873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:18.207904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:18.207924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:18.208060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:18.208118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:18.208150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:18.208169Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:18.208234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:18.208283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:18.208328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:18.208354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:18.208388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:18.208412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:18.208436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:18.208468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:18.208493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:18.208516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:18.208658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:18.208747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:18.208771Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:18.208857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:18.208887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:18.208905Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:18.208964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:18.209010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:18.209055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:18.209126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:18.209167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:18.209201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:18.209381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:18.209443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=6156; 2026-01-07T17:38:07.373884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-07T17:38:07.375371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1411; 2026-01-07T17:38:07.375471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8048; 2026-01-07T17:38:07.375526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8242; 2026-01-07T17:38:07.375596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-07T17:38:07.375724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=65; 2026-01-07T17:38:07.375770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9096; 
2026-01-07T17:38:07.375989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=156; 2026-01-07T17:38:07.376169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=129; 2026-01-07T17:38:07.376388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=168; 2026-01-07T17:38:07.376600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=159; 2026-01-07T17:38:07.377602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=931; 2026-01-07T17:38:07.378765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1084; 2026-01-07T17:38:07.378837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2026-01-07T17:38:07.378891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-07T17:38:07.378934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-07T17:38:07.379013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2026-01-07T17:38:07.379060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:38:07.379190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=73; 2026-01-07T17:38:07.379238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:38:07.379332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2026-01-07T17:38:07.379445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=58; 2026-01-07T17:38:07.379542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=59; 2026-01-07T17:38:07.379587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=24916; 2026-01-07T17:38:07.379763Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:38:07.379910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:38:07.379995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:38:07.380081Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:38:07.380146Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:38:07.380310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:07.380391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:07.380451Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:07.380505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:07.380581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:07.380638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:07.380701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:07.380816Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:07.381077Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.089000s; 2026-01-07T17:38:07.383084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:38:07.386172Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:38:07.386272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:07.386373Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:07.386424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:07.386486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:07.386582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:07.386654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:07.386720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:07.386847Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:38:07.386909Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:07.387502Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.118000s; 2026-01-07T17:38:07.387560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTenatsTest::TestTenantLimit >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> 
DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin >> DirectReadWithServer::KillPQTablet [GOOD] >> DirectReadWithServer::KillPQRBTablet [GOOD] >> LocalPartition::Restarts >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser >> TCmsTest::BridgeModeCollectInfo [GOOD] >> TCmsTest::BridgeModeGroups >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageNodesFromOneRing |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSubDomainTest::StartAndStopTenanNode >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table >> HttpRequest::ProbeServerless [GOOD] >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> 
TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:26.334165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:26.428169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:26.435714Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:26.436116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:26.436268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:26.899550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:27.016495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:27.016622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:27.057774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:27.150616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:27.839062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:27.839921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:27.839956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:27.839975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:27.840134Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:27.908768Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:28.548023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:35:32.080676Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:32.087958Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:35:32.091665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:32.135212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:35:32.135390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:32.180621Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:32.183963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:32.471629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:32.471764Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:32.473774Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.474751Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.475504Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.476778Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.476962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.477383Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.477540Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.477701Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.477972Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:32.510019Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:32.988539Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:33.020292Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:35:33.020375Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:35:33.084208Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:35:33.086141Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:35:33.086537Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:35:33.086645Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:35:33.086719Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:35:33.086784Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:35:33.086846Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:35:33.086910Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:35:33.087908Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:35:33.090833Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:35:33.093402Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:35:33.109684Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:35:33.109766Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:35:33.109860Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:35:33.119144Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:33.119297Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:33.151343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:35:33.151849Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:35:33.154837Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:33.197207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:33.216263Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:35:33.216477Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:35:33.234017Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:35:33.436585Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:35:33.498793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:35:33.939048Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:35:34.057425Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:35:34.057544Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:35:34.809535Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... Query with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:36:03.524806Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:5986:5096], Start read next stream part 2026-01-07T17:36:04.191120Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6013:5110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:04.191378Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:36:04.191567Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:36:04.191644Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6016:5113] 2026-01-07T17:36:04.191703Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6016:5113] 2026-01-07T17:36:04.192076Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:6017:5114] 2026-01-07T17:36:04.192184Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6016:5113], server id = [2:6017:5114], tablet id = 72075186224037894, status = OK 2026-01-07T17:36:04.192269Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:6017:5114], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:36:04.192335Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:36:04.192486Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:36:04.192549Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6013:5110], StatRequests.size() = 1 2026-01-07T17:36:04.192646Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:37:17.017022Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrfae48az8renk4p9rpq5j", SessionId: 
ydb://session/3?node_id=2&id=ZTQxNDNjMi1kZjljNDgyMC04OTY0Njc5OC0zZjQwYWY2, Slow query, duration: 73.484793s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:37:17.019026Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:5986:5096], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:37:17.035680Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:5986:5096], Start read next stream part 2026-01-07T17:37:17.049745Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:37:17.050290Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-07T17:37:17.050640Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:5986:5096], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:37:17.050720Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:5986:5096], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzNjOGQzY2MtMzU2YTY0NTItM2QwZDhkMDktOTQ0NGQzMWU=, TxId: 2026-01-07T17:37:17.074197Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:37:17.074380Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], Start read next stream part 2026-01-07T17:37:17.316082Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6227:5302]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:37:17.316498Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:37:17.316562Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6227:5302], StatRequests.size() = 1 2026-01-07T17:38:13.520318Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrhj8j0h9648ms6g1zejqa", SessionId: ydb://session/3?node_id=2&id=OWM4MDUzNjktYzhmNDQ0ZGQtOWFhMDVkMDYtYWQ3NDkzM2I=, Slow query, duration: 56.436505s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT 
AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:38:13.523338Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:38:13.523488Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], Start read next stream part 2026-01-07T17:38:13.524164Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33540, txId: 18446744073709551615] shutting down 2026-01-07T17:38:13.524470Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:38:13.524522Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5976:2463], ActorId: [2:6209:5287], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2JjMjhlYTgtZWY4ODRjOGItMWJmMDAyYTMtZTJmYzljZTE=, TxId: 2026-01-07T17:38:13.525101Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6383:5442], ActorId: [2:6386:5445], Starting query actor #1 [2:6389:5447] 2026-01-07T17:38:13.525180Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6386:5445], ActorId: [2:6389:5447], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-07T17:38:13.544563Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6386:5445], ActorId: [2:6389:5447], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWNhMjZjZDYtZmJmMTA5Y2UtNmNiYTIxN2YtMzUwMzkxMGQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:38:13.652266Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6401:5458]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:38:13.652547Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:38:13.652827Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-07T17:38:13.652872Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:38:13.653038Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:38:13.653102Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6401:5458], StatRequests.size() = 1 2026-01-07T17:38:13.653325Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:38:13.859230Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6386:5445], ActorId: [2:6389:5447], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=OWNhMjZjZDYtZmJmMTA5Y2UtNmNiYTIxN2YtMzUwMzkxMGQ=, TxId: 2026-01-07T17:38:13.859326Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6386:5445], ActorId: [2:6389:5447], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWNhMjZjZDYtZmJmMTA5Y2UtNmNiYTIxN2YtMzUwMzkxMGQ=, TxId: 2026-01-07T17:38:13.859837Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6383:5442], ActorId: [2:6386:5445], Got response [2:6389:5447] SUCCESS 2026-01-07T17:38:13.860316Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:38:13.877025Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-07T17:38:13.877121Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId= Dnf¢M, ActorId=[1:4615:3699] 2026-01-07T17:38:13.878899Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6419:4261]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:38:13.879353Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:38:13.879434Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:38:13.879884Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:38:13.879959Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:38:13.880038Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:38:13.905316Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks [GOOD] >> TCmsTest::RequestReplaceBrokenDevices |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy >> TCmsTest::Mirror3dcPermissions [GOOD] >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2026-01-07T17:36:49.395265Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:49.427064Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:49.427254Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:49.434596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:49.434847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:49.435058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:49.435201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:49.435327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:49.435434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:49.435527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:49.435628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:49.435751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:49.435856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.435976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:49.436106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:49.436215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:49.461958Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:49.462543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:49.462597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:49.462746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.462868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:49.462921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:49.462964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:49.463046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:49.463103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:49.463146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:49.463172Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:49.463362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:49.463461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:49.463508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:49.463538Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:49.463629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:49.463685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:49.463748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:49.463775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:49.463816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:49.463841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:49.463863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:49.463901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:49.463936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:49.463962Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:49.464193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:49.464313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:49.464339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:49.464491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:49.464533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.464561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:49.464605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:49.464650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:49.464689Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:49.464731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:49.464755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:36:49.464775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:49.464862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:49.464907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... icWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:37:54.094542Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:54.094629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-07T17:37:54.094694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:37:54.094854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:54.095044Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.095118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-07T17:37:54.095213Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.095958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.096128Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.096337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:54.096469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.631500s; 2026-01-07T17:37:54.096545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:54.097261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.097624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-07T17:37:54.098012Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-07T17:37:54.098092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.098303Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[190] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-07T17:37:54.099241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 
2026-01-07T17:37:54.099853Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:37:54.112882Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.113001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-07T17:37:54.113461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::98c079b4-ebef11f0-a3c13740-51e0b7b1; 2026-01-07T17:37:54.113556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:37:54.113658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-07T17:37:54.113741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:37:54.113836Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:37:54.113913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:54.113971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:37:54.114083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.630000s; 2026-01-07T17:37:54.114158Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=98c079b4-ebef11f0-a3c13740-51e0b7b1;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:37:54.114274Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:6171112:0] 2026-01-07T17:37:54.114349Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:6171112:0] 2026-01-07T17:37:54.114401Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6171112:0] 2026-01-07T17:37:54.114450Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:6171112:0] 2026-01-07T17:37:54.114495Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:3:2:0:6171112:0] 2026-01-07T17:37:54.114538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2026-01-07T17:37:54.114580Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:6171112:0] 2026-01-07T17:37:54.114625Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2026-01-07T17:37:54.114698Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-07T17:37:54.114751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6171112:0] 2026-01-07T17:37:54.114794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:6171112:0] 2026-01-07T17:37:54.114836Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6171112:0] 2026-01-07T17:37:54.114878Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:6171112:0] 2026-01-07T17:37:54.114919Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:6171112:0] 2026-01-07T17:37:54.114976Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:6171112:0] 2026-01-07T17:37:54.115019Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2026-01-07T17:37:54.115063Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:6171112:0] 2026-01-07T17:37:54.115102Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:6171112:0] 2026-01-07T17:37:54.115144Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-07T17:37:54.115211Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6171112:0] GC for channel 4 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |87.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> 
TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [GOOD] >> TSubDomainTest::LsLs >> TCmsTest::BridgeModeGroups [GOOD] >> TCmsTest::BridgeModeNodeLimit >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2026-01-07T17:38:05.926527Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TxUsage::WriteToTopic_Demo_24_Table >> TxUsage::WriteToTopic_Demo_14_Query [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TSubDomainTest::UserAttributes >> TxUsage::WriteToTopic_Demo_16_Table >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanUpdatedRows >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 26549, MsgBus: 21455 2026-01-07T17:34:19.309872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674363471112959:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:19.309957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:19.916901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:19.917085Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:20.025616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:20.029395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:20.033189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674363471112832:2081] 1767807259260865 != 1767807259260868 2026-01-07T17:34:20.047546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:20.159399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:20.159427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:20.159437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:20.159535Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:20.277082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:20.333081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:20.812175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:20.829446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:34:20.939973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:21.211657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:21.665888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:22.025538Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:24.309932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674363471112959:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:24.309998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:24.799275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674384945951205:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.799444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.799883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674384945951215:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:24.799970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.226741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.276086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.324873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.371288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.424375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.492311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.571756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.661138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:25.783554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674389240919385:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.783673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.783941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674389240919391:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.783945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674389240919390:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.783997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:25.788010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:25.804378Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674389240919394:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:34:25.898553Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674389240919447:3794] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:34:27.878933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:36:46.773048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:46.892712Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:46.904469Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592674998143905013:2081] 1767807406736343 != 1767807406736346 2026-01-07T17:36:46.922953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:46.923073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:46.948806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:47.053617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:36:47.053649Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:36:47.053660Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:36:47.053762Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:36:47.067115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:47.660705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:47.678202Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:47.752284Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:47.763492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:47.985818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:48.067741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.389088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675019618743361:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:51.389213Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:51.393075Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675019618743371:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:51.393179Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:51.487784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.541094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.584405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.642933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.738746Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592674998143905041:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:51.738823Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:36:51.773697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.823825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.888983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.959513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:52.083908Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675023913711547:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:52.084021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:52.084472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675023913711553:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:52.084531Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675023913711552:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:52.084632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:52.089099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:52.110825Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675023913711556:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:36:52.166747Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675023913711607:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:55.627800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:01.853002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:37:01.853040Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> TxUsage::WriteToTopic_Demo_43_Query >> TSubDomainTest::FailIfAffectedSetNotInterior >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 26314, MsgBus: 22608 2026-01-07T17:37:04.835755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675074004208016:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:04.835804Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:05.072325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:05.586296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:05.628779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:05.628902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:05.696973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:05.783627Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:05.798420Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675074004207927:2081] 1767807424816215 != 1767807424816218 2026-01-07T17:37:05.822634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:05.934238Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:06.077802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:37:06.077825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:37:06.077832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:37:06.077913Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:07.204401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:07.224878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:37:09.837888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675074004208016:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:09.838003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:10.363477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675099774012613:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:10.363622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675099774012602:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:10.363796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:10.365420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675099774012617:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:10.365533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:10.369467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:10.392663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675099774012616:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:37:10.521250Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675099774012669:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:10.913887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:11.088812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:13.604882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 8325, MsgBus: 2810 ... waiting for SysViewsRoster update finished 2026-01-07T17:37:20.711531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:20.719054Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:37:20.723795Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:37:20.724187Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:37:20.724387Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:37:21.032017Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:21.032176Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:21.062635Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807436911300 != 1767807436911304 2026-01-07T17:37:21.066526Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:21.115406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:21.279431Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:37:21.717365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:37:21.717441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:37:21.717493Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:37:21.725338Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:21.737556Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:22.128645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:22.177983Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.178123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.178233Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.179563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource poo ... 15662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.793440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.802097Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.802201Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.802239Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.812023Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.812126Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.812177Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.820440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.820547Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.820582Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.828558Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.828654Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.828689Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.838104Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.838211Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.838249Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.846187Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.846288Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.846338Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.854854Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.854932Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.854964Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.863357Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.863431Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.863461Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.926594Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.926703Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.926741Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.934637Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.934739Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.934780Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.941209Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.941328Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.941369Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.948376Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.948458Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.948485Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.956164Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.956248Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:11.956278Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:38:12.060819Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrjpb48n36shh7p4cd486h", SessionId: ydb://session/3?node_id=3&id=YjQzOTFiYzItMjQxNjNlYjgtN2MyM2YyYzYtNWQ5YzI4Y2U=, Slow query, duration: 18.035657s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] Test command err: 2026-01-07T17:38:02.631212Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:38:02.631324Z node 9 :CMS 
DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-07T17:38:02.631466Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-07T17:38:02.633502Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 
14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-07T17:38:02.634196Z node 9 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP 
Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-07T17:38:02.634497Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-07T17:38:02.634576Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-07T17:38:02.634638Z node 9 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 9 has not yet been completed) 2026-01-07T17:38:02.634801Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:38:02.635031Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:38:02.635087Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 9, marker# MARKER_DISK_FAULTY 2026-01-07T17:38:02.635350Z node 9 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-07T17:38:02.635407Z node 9 :CMS DEBUG: 
sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:38:02.635502Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:38:02.635550Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-07T17:38:02.635597Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-07T17:38:02.635622Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-07T17:38:02.635664Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:38:02.635690Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:38:02.635722Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, ... : sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:38:08.095180Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-07T17:38:08.095236Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:9, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-07T17:38:08.095278Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2026-01-07T17:38:08.095597Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-07T17:38:08.095800Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-07T17:38:08.095912Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2026-01-07T17:38:08.095954Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:9 2026-01-07T17:38:08.096002Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2026-01-07T17:38:08.109073Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-07T17:38:08.109157Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-07T17:38:08.127548Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:38:08.127664Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-07T17:38:08.127740Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-07T17:38:08.128557Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:38:08.128676Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction 
from host 9 has not yet been completed" } 2026-01-07T17:38:08.128739Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2026-01-07T17:38:08.128794Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 0; State: Ok 2026-01-07T17:38:08.128823Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 1; State: Ok 2026-01-07T17:38:08.128841Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 2; State: Ok 2026-01-07T17:38:08.128863Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-07T17:38:08.129059Z node 9 :CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user, priority# 0 2026-01-07T17:38:08.129121Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-07T17:38:08.129239Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:38:08.129431Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.210512Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-07T17:38:08.129542Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:38:08.143599Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-07T17:38:08.143910Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } Deadline: 780210512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2026-01-07T17:38:08.143978Z node 9 :CMS DEBUG: cms.cpp:1094: Schedule cleanup at 1970-01-01T00:33:00.210512Z 2026-01-07T17:38:08.183382Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-07T17:38:08.183760Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:38:08.183842Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-07T17:38:08.183908Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-07T17:38:08.184672Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:38:08.184785Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2026-01-07T17:38:08.184846Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio 
limit: 0, locked nodes: 1, down nodes: 0 2026-01-07T17:38:08.184893Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-07T17:38:08.185225Z node 9 :CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user, priority# 0 2026-01-07T17:38:08.185309Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (10) (permission user-p-2 until 1970-01-01T00:13:00Z) 2026-01-07T17:38:08.185400Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:38:08.185561Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.312024Z, action# Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 2026-01-07T17:38:08.185667Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:38:08.200670Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-07T17:38:08.201001Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 } Deadline: 780312024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 10 InterconnectPort: 12002 } } } } 2026-01-07T17:38:08.201606Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-1 2026-01-07T17:38:08.201664Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-07T17:38:08.201737Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-07T17:38:08.201839Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 9 2026-01-07T17:38:08.201932Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2026-01-07T17:38:08.201999Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2026-01-07T17:38:08.214598Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-07T17:38:08.214814Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-07T17:38:08.215414Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-2 2026-01-07T17:38:08.215477Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-07T17:38:08.215566Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-07T17:38:08.215663Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 10 2026-01-07T17:38:08.215758Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2026-01-07T17:38:08.215801Z node 9 :CMS NOTICE: audit_log.cpp:12: 
[AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2026-01-07T17:38:08.231519Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-07T17:38:08.231777Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-07T17:38:20.149036Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 49 2026-01-07T17:38:20.150390Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 54 2026-01-07T17:38:20.150509Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 55 2026-01-07T17:38:20.150669Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 56 2026-01-07T17:38:20.150711Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 50 2026-01-07T17:38:20.150742Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 51 2026-01-07T17:38:20.150776Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 52 2026-01-07T17:38:20.150806Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 53 2026-01-07T17:38:20.150835Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 61 2026-01-07T17:38:20.150863Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 62 2026-01-07T17:38:20.150891Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 63 2026-01-07T17:38:20.150922Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 64 2026-01-07T17:38:20.150954Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 57 2026-01-07T17:38:20.150983Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 58 2026-01-07T17:38:20.151014Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 59 2026-01-07T17:38:20.151043Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 60 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest 
|87.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin >> TSubDomainTest::LsAltered [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> KqpScan::ScanRetryRead >> KqpScan::RemoteShardScan >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2026-01-07T17:38:14.584593Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675372662108120:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:14.584674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:14.663954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:15.004043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:15.041087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:15.041300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:15.051294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:15.105500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:15.173140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:38:15.320030Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675372662108363:2136] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:15.320085Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675376957076129:2445] HANDLE EvNavigateScheme dc-1 
2026-01-07T17:38:15.320184Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675372662108391:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:15.320286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675372662108603:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675372662108391:2150], cookie# 1 2026-01-07T17:38:15.322009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675372662108655:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108652:2292], cookie# 1 2026-01-07T17:38:15.322046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675372662108656:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108653:2292], cookie# 1 2026-01-07T17:38:15.322059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675372662108657:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108654:2292], cookie# 1 2026-01-07T17:38:15.322110Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675372662108024:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108656:2292], cookie# 1 2026-01-07T17:38:15.322141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675372662108027:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108657:2292], cookie# 1 2026-01-07T17:38:15.322173Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675372662108656:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108024:2053], cookie# 1 2026-01-07T17:38:15.322189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675372662108657:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108027:2056], cookie# 1 2026-01-07T17:38:15.322240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675372662108603:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108653:2292], cookie# 1 2026-01-07T17:38:15.322262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675372662108603:2292][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:15.322275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675372662108603:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108654:2292], cookie# 1 2026-01-07T17:38:15.322290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592675372662108603:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:15.322349Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# 
[1:7592675372662108391:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:15.325038Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675372662108021:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675372662108655:2292], cookie# 1 2026-01-07T17:38:15.325092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675372662108655:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108021:2050], cookie# 1 2026-01-07T17:38:15.325141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675372662108603:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675372662108652:2292], cookie# 1 2026-01-07T17:38:15.325170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675372662108603:2292][/dc-1] Sync cookie mismatch: sender# [1:7592675372662108652:2292], cookie# 1, current cookie# 0 2026-01-07T17:38:15.330641Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675372662108391:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592675372662108603:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:15.330789Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675372662108391:2150], cacheItem# { Subscriber: { Subscriber: [1:7592675372662108603:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:15.333881Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675376957076130:2446], recipient# [1:7592675376957076129:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:15.334006Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675376957076129:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:15.366572Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592675376957076129:2445] SEND to# 
72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:15.370230Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592675376957076129:2445] Handle TEvDescribeSchemeResult Forward to# [1:7592675376957076128:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:38:15.372951Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592675372662108363:2136] Handle TEvProposeTransaction 2026-01-07T17:38:15.372984Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592675372662108363:2136] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:38:15.373051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592675372662108363:2136] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592675376957076135:2450] 2026-01-07T17:38:15.465676Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592675376957076135:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: ... 
14706090973:2559][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592675401821188099:2056] 2026-01-07T17:38:23.357934Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188093:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090964:2558] 2026-01-07T17:38:23.357937Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7592675414706090972:2559][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592675401821188096:2053] 2026-01-07T17:38:23.357947Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188093:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090971:2559] 2026-01-07T17:38:23.357961Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090965:2557] 2026-01-07T17:38:23.357964Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592675414706090955:2559][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592675414706090968:2559] 2026-01-07T17:38:23.357976Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090967:2558] 2026-01-07T17:38:23.357987Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592675414706090955:2559][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592675414706090970:2559] 2026-01-07T17:38:23.357988Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188099:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090973:2559] 2026-01-07T17:38:23.358001Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188096:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090963:2557] 2026-01-07T17:38:23.358008Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7592675414706090955:2559][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7592675401821188427:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:23.358014Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188096:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090966:2558] 2026-01-07T17:38:23.358024Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592675414706090955:2559][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592675414706090969:2559] 2026-01-07T17:38:23.358024Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592675401821188096:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592675414706090972:2559] 2026-01-07T17:38:23.358044Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][3:7592675414706090955:2559][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7592675401821188427:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:23.358080Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-07T17:38:23.358139Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7592675414706090954:2558] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:23.358228Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592675401821188427:2126], cacheItem# { Subscriber: { Subscriber: [3:7592675414706090954:2558] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:23.358254Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:38:23.358290Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7592675414706090953:2557] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:23.358331Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592675401821188427:2126], cacheItem# { Subscriber: { Subscriber: [3:7592675414706090953:2557] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:23.358380Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:38:23.358415Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592675401821188427:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7592675414706090955:2559] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:23.358461Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592675401821188427:2126], cacheItem# { Subscriber: { Subscriber: [3:7592675414706090955:2559] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:23.358510Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592675414706090974:2560], recipient# [3:7592675414706090947:2302], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:23.358568Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592675414706090975:2561], recipient# [3:7592675414706090950:2305], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:24.097681Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592675401821188427:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:24.097836Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592675401821188427:2126], cacheItem# { Subscriber: { Subscriber: 
[3:7592675406116156308:2544] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:24.097946Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592675419001058279:2565], recipient# [3:7592675419001058278:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TGRpcStreamingTest::WritesDoneFromClient >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::IcebergHiveSaSelectAll >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2026-01-07T17:38:19.124005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675395934367699:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:19.124145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:19.320696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:19.369000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:19.369189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:19.374366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:19.545190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:19.585980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:38:19.750142Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675395934367840:2105] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:19.750223Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675395934368145:2267] HANDLE EvNavigateScheme dc-1 2026-01-07T17:38:19.750331Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675395934367847:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:19.750443Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675395934368013:2200][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675395934367847:2108], cookie# 1 2026-01-07T17:38:19.752124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675395934368036:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368033:2200], cookie# 1 2026-01-07T17:38:19.752172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675395934368037:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368034:2200], cookie# 1 2026-01-07T17:38:19.752187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675395934368038:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368035:2200], cookie# 1 2026-01-07T17:38:19.752242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675395934367544:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368036:2200], cookie# 1 2026-01-07T17:38:19.752271Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675395934367547:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368037:2200], cookie# 1 2026-01-07T17:38:19.752307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675395934367550:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675395934368038:2200], cookie# 1 2026-01-07T17:38:19.752367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675395934368036:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934367544:2049], cookie# 1 2026-01-07T17:38:19.752383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675395934368037:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934367547:2052], cookie# 1 2026-01-07T17:38:19.752397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675395934368038:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934367550:2055], cookie# 1 2026-01-07T17:38:19.752439Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675395934368013:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934368033:2200], cookie# 1 2026-01-07T17:38:19.752470Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675395934368013:2200][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:19.752488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675395934368013:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934368034:2200], cookie# 1 2026-01-07T17:38:19.752509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592675395934368013:2200][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:19.752567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675395934368013:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675395934368035:2200], cookie# 1 2026-01-07T17:38:19.752582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675395934368013:2200][/dc-1] Sync cookie mismatch: sender# [1:7592675395934368035:2200], cookie# 1, current cookie# 0 2026-01-07T17:38:19.752652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592675395934367847:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:19.760652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675395934367847:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592675395934368013:2200] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:19.760815Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675395934367847:2108], cacheItem# { Subscriber: { Subscriber: [1:7592675395934368013:2200] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:19.763902Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675395934368146:2268], recipient# [1:7592675395934368145:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] 
Groups: [] } }] } 2026-01-07T17:38:19.763994Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675395934368145:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:19.803171Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592675395934368145:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:19.806547Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592675395934368145:2267] Handle TEvDescribeSchemeResult Forward to# [1:7592675395934368144:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:38:19.809218Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592675395934367840:2105] Handle TEvProposeTransaction 2026-01-07T17:38:19.809265Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592675395934367840:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:38:19.809350Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592675395934367840:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592675395934368151:2272] 2026-01-07T17:38:19.922825Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592675395934368151:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:38:19.922892Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592675395934368151:2272] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatab ... 
Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:23.697536Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7592675414718890423:2334] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:23.697604Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7592675414718890423:2334] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-01-07T17:38:23.698278Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7592675414718890423:2334] Handle TEvDescribeSchemeResult Forward to# [2:7592675414718890422:2333] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767807503217 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 2026-01-07T17:38:23.703906Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7592675410423922705:2104] Handle TEvNavigate describe path /dc-1 2026-01-07T17:38:23.703946Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7592675414718890426:2337] HANDLE EvNavigateScheme /dc-1 2026-01-07T17:38:23.704039Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592675410423922711:2106], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:23.704128Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7592675410423922725:2112][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7592675410423922711:2106], cookie# 4 2026-01-07T17:38:23.704187Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][2:7592675410423922729:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922726:2112], cookie# 4 2026-01-07T17:38:23.704203Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592675410423922730:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922727:2112], cookie# 4 2026-01-07T17:38:23.704217Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592675410423922731:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922728:2112], cookie# 4 2026-01-07T17:38:23.704243Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675410423922416:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922729:2112], cookie# 4 2026-01-07T17:38:23.704268Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675410423922419:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922730:2112], cookie# 4 2026-01-07T17:38:23.704285Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675410423922422:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592675410423922731:2112], cookie# 4 2026-01-07T17:38:23.704325Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675410423922729:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922416:2049], cookie# 4 2026-01-07T17:38:23.704342Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675410423922730:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922419:2052], cookie# 4 2026-01-07T17:38:23.704375Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675410423922731:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922422:2055], cookie# 4 2026-01-07T17:38:23.704417Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675410423922725:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922726:2112], cookie# 4 2026-01-07T17:38:23.704439Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7592675410423922725:2112][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:23.704459Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675410423922725:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922727:2112], cookie# 4 2026-01-07T17:38:23.704478Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7592675410423922725:2112][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:23.704503Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675410423922725:2112][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592675410423922728:2112], cookie# 4 2026-01-07T17:38:23.704516Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7592675410423922725:2112][/dc-1] 
Sync cookie mismatch: sender# [2:7592675410423922728:2112], cookie# 4, current cookie# 0 2026-01-07T17:38:23.704555Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7592675410423922711:2106], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:23.704623Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592675410423922711:2106], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7592675410423922725:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807503182 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:23.704701Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592675410423922711:2106], cacheItem# { Subscriber: { Subscriber: [2:7592675410423922725:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807503182 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-07T17:38:23.704839Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592675414718890427:2338], recipient# [2:7592675414718890426:2337], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:23.704871Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7592675414718890426:2337] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:23.704956Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7592675414718890426:2337] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:23.705617Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7592675414718890426:2337] Handle TEvDescribeSchemeResult Forward to# [2:7592675414718890425:2336] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767807503182 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2026-01-07T17:37:03.757109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:37:03.799952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:37:03.800221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:37:03.808022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:37:03.808271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:37:03.808502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:37:03.808630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:37:03.808867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:37:03.808994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:37:03.809177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:37:03.809302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:37:03.809465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:37:03.809585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:37:03.809713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:37:03.809832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:37:03.809942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:37:03.848783Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:37:03.849255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:37:03.849330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:37:03.849539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:37:03.849711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:37:03.849781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:37:03.849826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:37:03.849944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:37:03.850010Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:37:03.850056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:37:03.850095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:37:03.850289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:37:03.850379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:37:03.850437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:37:03.850469Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:37:03.850564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:37:03.850618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:37:03.850688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:37:03.850722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:37:03.850787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:37:03.850824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:37:03.850873Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:37:03.850922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:37:03.850963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:37:03.851047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:37:03.851305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:37:03.851364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:37:03.851403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:37:03.851531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:37:03.851573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:37:03.851607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:37:03.851660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:37:03.851698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:37:03.851745Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:37:03.851793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:37:03.851838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:37:03.851867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:37:03.853829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:37:03.853906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ngines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7172; 2026-01-07T17:38:22.661440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=16; 2026-01-07T17:38:22.662367Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=864; 2026-01-07T17:38:22.662470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8555; 2026-01-07T17:38:22.662527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8708; 2026-01-07T17:38:22.662593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-07T17:38:22.662683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=47; 2026-01-07T17:38:22.662732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9487; 2026-01-07T17:38:22.662918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=125; 2026-01-07T17:38:22.663087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=117; 2026-01-07T17:38:22.663320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=177; 2026-01-07T17:38:22.663535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=161; 2026-01-07T17:38:22.667004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3398; 2026-01-07T17:38:22.670241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3131; 2026-01-07T17:38:22.670346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-07T17:38:22.670400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:38:22.670441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:38:22.670518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-01-07T17:38:22.670567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-07T17:38:22.670662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2026-01-07T17:38:22.670730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-07T17:38:22.670805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=42; 2026-01-07T17:38:22.670922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=77; 2026-01-07T17:38:22.671030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2026-01-07T17:38:22.671088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28961; 2026-01-07T17:38:22.671268Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:38:22.671384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:38:22.671477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:38:22.671583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:38:22.671647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:38:22.671801Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:22.671888Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:22.671934Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:22.671983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:22.672053Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:22.672101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:22.672142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:22.672249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:22.672472Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.143000s; 2026-01-07T17:38:22.675072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:38:22.677546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:38:22.677637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:38:22.677727Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:38:22.677780Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:38:22.677844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:38:22.677927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:38:22.677993Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:22.678050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:38:22.678144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:38:22.678236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:38:22.679242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.003000s; 2026-01-07T17:38:22.679307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectAll >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2026-01-07T17:38:20.352721Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675399530157153:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:20.352826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:20.737721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-07T17:38:20.737856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:20.769684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:20.790624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:20.827384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:20.830296Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675399530157005:2081] 1767807500337993 != 1767807500337996 2026-01-07T17:38:20.960968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:38:20.996420Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675399530157267:2105] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:20.996478Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675399530157571:2268] HANDLE EvNavigateScheme dc-1 2026-01-07T17:38:20.996628Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675399530157273:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:20.996799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675399530157464:2208][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675399530157273:2107], cookie# 1 2026-01-07T17:38:20.999819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675399530157483:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157480:2208], cookie# 1 2026-01-07T17:38:20.999908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675399530157484:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157481:2208], cookie# 1 2026-01-07T17:38:20.999931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675399530157485:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157482:2208], cookie# 1 2026-01-07T17:38:20.999981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675399530156973:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157483:2208], cookie# 1 2026-01-07T17:38:21.000013Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675399530156976:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157484:2208], cookie# 1 2026-01-07T17:38:21.000031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675399530156979:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675399530157485:2208], cookie# 1 2026-01-07T17:38:21.000081Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675399530157483:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530156973:2049], cookie# 1 2026-01-07T17:38:21.000100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675399530157484:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530156976:2052], cookie# 1 2026-01-07T17:38:21.000135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675399530157485:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530156979:2055], cookie# 1 2026-01-07T17:38:21.000204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675399530157464:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530157480:2208], cookie# 1 2026-01-07T17:38:21.000242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675399530157464:2208][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:21.000269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675399530157464:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530157481:2208], cookie# 1 2026-01-07T17:38:21.000294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592675399530157464:2208][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:21.000332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675399530157464:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675399530157482:2208], cookie# 1 2026-01-07T17:38:21.000362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675399530157464:2208][/dc-1] Sync cookie mismatch: sender# [1:7592675399530157482:2208], cookie# 1, current cookie# 0 2026-01-07T17:38:21.000432Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592675399530157273:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:21.007195Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675399530157273:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592675399530157464:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:21.007368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675399530157273:2107], cacheItem# { Subscriber: { Subscriber: [1:7592675399530157464:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:21.010701Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675403825124868:2269], recipient# [1:7592675399530157571:2268], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:21.010810Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675399530157571:2268] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:21.044325Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592675399530157571:2268] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:21.048074Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592675399530157571:2268] Handle TEvDescribeSchemeResult Forward to# [1:7592675399530157570:2267] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:38:21.049721Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592675399530157267:2105] Handle 
TEvProposeTransaction 2026-01-07T17:38:21.049752Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592675399530157267:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:38:21.049836Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592675399530157267:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592675403825124873:2273] 2026-01-07T17:38:21.177842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592675403825124873:2273] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" ... 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [2:7592675417939924710:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767807504743 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7592675417939924710:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767807504743 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2026-01-07T17:38:24.820863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 2026-01-07T17:38:24.822413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-07T17:38:24.823461Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7592675417939924213:2090] Handle TEvNavigate describe path /dc-1/USER_0 2026-01-07T17:38:24.823492Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7592675417939924780:2364] HANDLE EvNavigateScheme /dc-1/USER_0 2026-01-07T17:38:24.823573Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592675417939924363:2114], request# { ErrorCount: 0 DatabaseName: 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:24.823638Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7592675417939924710:2310][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7592675417939924363:2114], cookie# 10 2026-01-07T17:38:24.823688Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592675417939924714:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924711:2310], cookie# 10 2026-01-07T17:38:24.823705Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592675417939924715:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924712:2310], cookie# 10 2026-01-07T17:38:24.823719Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592675417939924716:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924713:2310], cookie# 10 2026-01-07T17:38:24.823749Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675417939924064:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924714:2310], cookie# 10 2026-01-07T17:38:24.823771Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675417939924067:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924715:2310], cookie# 10 2026-01-07T17:38:24.823788Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592675417939924070:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7592675417939924716:2310], cookie# 10 2026-01-07T17:38:24.823826Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675417939924714:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7592675417939924064:2049], cookie# 10 2026-01-07T17:38:24.823842Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675417939924715:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7592675417939924067:2052], cookie# 10 2026-01-07T17:38:24.823858Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592675417939924716:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7592675417939924070:2055], cookie# 10 2026-01-07T17:38:24.823905Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675417939924710:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7592675417939924711:2310], cookie# 10 2026-01-07T17:38:24.823926Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7592675417939924710:2310][/dc-1/USER_0] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:24.823944Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675417939924710:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 
Cluster State: { } }: sender# [2:7592675417939924712:2310], cookie# 10 2026-01-07T17:38:24.823965Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7592675417939924710:2310][/dc-1/USER_0] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:24.824024Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592675417939924710:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7592675417939924713:2310], cookie# 10 2026-01-07T17:38:24.824038Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7592675417939924710:2310][/dc-1/USER_0] Sync cookie mismatch: sender# [2:7592675417939924713:2310], cookie# 10, current cookie# 0 2026-01-07T17:38:24.824101Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7592675417939924363:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-07T17:38:24.824164Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592675417939924363:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7592675417939924710:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767807504743 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:24.824250Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592675417939924363:2114], cacheItem# { Subscriber: { Subscriber: [2:7592675417939924710:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767807504743 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-07T17:38:24.824388Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592675417939924781:2365], recipient# [2:7592675417939924780:2364], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:24.824417Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7592675417939924780:2364] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:24.824479Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7592675417939924780:2364] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath 
Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-01-07T17:38:24.825052Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7592675417939924780:2364] Handle TEvDescribeSchemeResult Forward to# [2:7592675417939924779:2363] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767807504743 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |87.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy >> TWebLoginService::AuditLogLoginSuccess >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |87.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [FAIL] >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:36:55.409163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:55.588071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:55.609534Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:55.610228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:55.610296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:56.027953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:56.028109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:56.197471Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807412115537 != 1767807412115541 2026-01-07T17:36:56.222574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:56.277179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:56.433660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:56.588285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:56.661570Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=N2ViZWEyNDMtY2U2MTVkYmMtY2IyMmFmMTgtZTMyOGFlOTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# N2ViZWEyNDMtY2U2MTVkYmMtY2IyMmFmMTgtZTMyOGFlOTA= temp_dir_name# 0d654a04-413a-426c-a748-71afa3410605 trace_id# 2026-01-07T17:36:56.662416Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=N2ViZWEyNDMtY2U2MTVkYmMtY2IyMmFmMTgtZTMyOGFlOTA=, ActorId: [1:843:2738], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:36:56.663172Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=N2ViZWEyNDMtY2U2MTVkYmMtY2IyMmFmMTgtZTMyOGFlOTA=, ActorId: [1:843:2738], ActorState: ReadyState, LegacyTraceId: 01kecrgyaqbxz823nm62kmpb7q, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpc_actor# [0:0:0] database# database_id# /Root pool_id# default trace_id# 2026-01-07T17:36:56.940524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:846:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:56.940720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:56.941250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:56.941354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:56.956231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:56.995641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:56.996751Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:56.999092Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:893:2772] 2026-01-07T17:36:56.999433Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:57.079453Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:57.080298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:57.080431Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:57.082283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:57.082372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:57.082435Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:57.082914Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:57.083089Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:57.083201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:909:2772] in generation 1 2026-01-07T17:36:57.083713Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:57.127137Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:57.127412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:57.127562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:911:2782] 2026-01-07T17:36:57.127616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:57.127657Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:57.127699Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:57.127973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2772], Recipient [1:893:2772]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:57.128059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:57.128483Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:57.128601Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:57.128709Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:57.128772Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:57.128831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:57.128872Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:57.129046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:57.129086Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:57.129142Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:57.129244Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:899:2775], Recipient [1:893:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:57.129284Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:57.129337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:892:2771], serverId# [1:899:2775], sessionId# [0:0:0] 2026-01-07T17:36:57.129436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:899:2775] 2026-01-07T17:36:57.129476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:57.129598Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:57.129899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:57.129996Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:57.130128Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:36:57.130187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:36:57.130233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:36:57.130273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSch ... 
8:28.682112Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:38:28.682199Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:38:28.682404Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:38:28.682515Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2026-01-07T17:38:28.682578Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-07T17:38:28.682616Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2026-01-07T17:38:28.682649Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:38:28.682682Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:38:28.682725Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-07T17:38:28.682796Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715666] at 72075186224037888 2026-01-07T17:38:28.682838Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-07T17:38:28.682868Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:38:28.682895Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:38:28.682924Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:38:28.682953Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-07T17:38:28.682980Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:38:28.683007Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-07T17:38:28.683036Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-07T17:38:28.683135Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-07T17:38:28.683300Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 
72057594046644480 PathId: 38 HasWrites: false 2026-01-07T17:38:28.683432Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-07T17:38:28.683537Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-07T17:38:28.683571Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-07T17:38:28.683602Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:38:28.683632Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-07T17:38:28.683695Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:38:28.683827Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2026-01-07T17:38:28.683867Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:38:28.683902Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:38:28.683933Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:38:28.683996Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-07T17:38:28.684029Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:38:28.684057Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2026-01-07T17:38:28.684135Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:38:28.684174Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-07T17:38:28.684222Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:38:28.685858Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [13:70:2117], Recipient [13:1104:2925]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2026-01-07T17:38:29.040714Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1169:2969], Recipient [13:1104:2925]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:38:29.040996Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at 
tablet# 72075186224037888, FollowerId 0 2026-01-07T17:38:29.041112Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-07T17:38:29.041189Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v600/18446744073709551615 2026-01-07T17:38:29.041311Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-07T17:38:29.041476Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:38:29.041557Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:38:29.041636Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:38:29.041693Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:38:29.041759Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-07T17:38:29.041836Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:38:29.041868Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:38:29.041895Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:38:29.041925Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:38:29.042095Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:38:29.042480Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1169:2969], 0} after executionsCount# 1 2026-01-07T17:38:29.042578Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1169:2969], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:38:29.042709Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1169:2969], 0} finished in read 2026-01-07T17:38:29.042811Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:38:29.042847Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:38:29.042880Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:38:29.042911Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:38:29.042972Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:38:29.043000Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:38:29.043032Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-07T17:38:29.043097Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:38:29.043262Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:38:29.045119Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1169:2969], Recipient [13:1104:2925]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:38:29.045220Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] Test command err: 2026-01-07T17:38:16.765521Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675384054434369:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:16.766306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:17.023657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:17.023788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:17.029730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:17.095596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:17.106397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:17.107834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675384054434335:2081] 1767807496756134 != 1767807496756137 2026-01-07T17:38:17.263232Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:38:17.288277Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675384054434598:2105] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:17.288337Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675388349402198:2267] HANDLE EvNavigateScheme dc-1 2026-01-07T17:38:17.288492Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675384054434623:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:17.288621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675384054434776:2197][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675384054434623:2119], cookie# 1 2026-01-07T17:38:17.290444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675384054434805:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434802:2197], cookie# 1 2026-01-07T17:38:17.290494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675384054434806:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434803:2197], cookie# 1 2026-01-07T17:38:17.290508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675384054434807:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434804:2197], cookie# 1 2026-01-07T17:38:17.290536Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675384054434306:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434806:2197], cookie# 1 2026-01-07T17:38:17.290546Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675384054434303:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434805:2197], cookie# 1 2026-01-07T17:38:17.290564Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675384054434309:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675384054434807:2197], cookie# 1 2026-01-07T17:38:17.290630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675384054434806:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434306:2052], cookie# 1 2026-01-07T17:38:17.290681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675384054434805:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434303:2049], cookie# 1 2026-01-07T17:38:17.290726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675384054434807:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434309:2055], cookie# 1 2026-01-07T17:38:17.290814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675384054434776:2197][/dc-1] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434803:2197], cookie# 1 2026-01-07T17:38:17.290844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675384054434776:2197][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:17.290865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675384054434776:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434802:2197], cookie# 1 2026-01-07T17:38:17.290893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592675384054434776:2197][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:17.290935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675384054434776:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675384054434804:2197], cookie# 1 2026-01-07T17:38:17.290962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675384054434776:2197][/dc-1] Sync cookie mismatch: sender# [1:7592675384054434804:2197], cookie# 1, current cookie# 0 2026-01-07T17:38:17.291012Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592675384054434623:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:17.312484Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675384054434623:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592675384054434776:2197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:17.312624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675384054434623:2119], cacheItem# { Subscriber: { Subscriber: [1:7592675384054434776:2197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:17.315417Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675388349402199:2268], recipient# [1:7592675388349402198:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:17.315534Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675388349402198:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:17.342507Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592675388349402198:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:17.345384Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592675388349402198:2267] Handle TEvDescribeSchemeResult Forward to# [1:7592675388349402197:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:38:17.352261Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592675384054434598:2105] Handle TEvProposeTransaction 2026-01-07T17:38:17.352294Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592675384054434598:2105] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:38:17.352349Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592675384054434598:2105] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592675388349402204:2272] 2026-01-07T17:38:17.475187Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592675388349402204:2272] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" ... 
: { Subscriber: [4:7592675426472737568:2233] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:30.270043Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675443652606869:2249], recipient# [4:7592675443652606868:2317], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:30.432696Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675422177770058:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:30.432841Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675422177770058:2110], cacheItem# { Subscriber: { Subscriber: [4:7592675439357639502:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:30.432898Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675422177770058:2110], cacheItem# { Subscriber: { Subscriber: [4:7592675439357639503:2243] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:30.433058Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675443652606870:2250], recipient# [4:7592675439357639497:2312], result# { 
ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:30.433436Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7592675439357639497:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:30.565692Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592675439357639505:2242] 2026-01-07T17:38:30.565778Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.565807Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592675439357639506:2242] 2026-01-07T17:38:30.565835Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.565862Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592675439357639507:2242] 2026-01-07T17:38:30.565884Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639502:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.566858Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7592675439357639511:2244] 2026-01-07T17:38:30.566908Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.566931Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# 
[4:7592675439357639512:2244] 2026-01-07T17:38:30.566953Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.566974Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7592675439357639513:2244] 2026-01-07T17:38:30.567015Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639504:2244][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.573872Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592675439357639518:2243] 2026-01-07T17:38:30.573951Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.573977Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592675439357639519:2243] 2026-01-07T17:38:30.574002Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.574023Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592675439357639520:2243] 2026-01-07T17:38:30.574046Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592675439357639503:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592675422177770058:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2026-01-07T17:38:27.563146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675429491557658:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:27.563218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:27.980996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:27.998007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:27.998160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:28.094790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:28.158244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:28.193311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675429491557527:2081] 1767807507528141 != 1767807507528144 2026-01-07T17:38:28.255928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:28.273425Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7dc5c637f480] stream accepted Name# Session ok# true peer# ipv6:[::1]:54144 2026-01-07T17:38:28.274182Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7dc5c637f480] facade attach Name# Session actor# [1:7592675433786525593:2468] peer# ipv6:[::1]:54144 2026-01-07T17:38:28.274214Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7dc5c637f480] facade read Name# Session peer# ipv6:[::1]:54144 2026-01-07T17:38:28.274469Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7dc5c637f480] read finished Name# Session ok# false data# peer# ipv6:[::1]:54144 2026-01-07T17:38:28.274603Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2026-01-07T17:38:28.274632Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7dc5c637f480] facade finish Name# Session peer# ipv6:[::1]:54144 grpc status# (9) message# Everything is A-OK 2026-01-07T17:38:28.274926Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7dc5c637f480] stream done notification Name# Session ok# true peer# ipv6:[::1]:54144 2026-01-07T17:38:28.274984Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7dc5c637f480] stream finished Name# Session ok# true peer# ipv6:[::1]:54144 grpc status# (9) message# Everything is A-OK 2026-01-07T17:38:28.275003Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7dc5c637f480] deregistering request Name# Session peer# ipv6:[::1]:54144 (finish done) 2026-01-07T17:38:28.277970Z node 1 :GRPC_SERVER DEBUG: 
grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNamesCore::NameListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::PrefixListIsUnchanged >> TSchemeShardSysNamesCore::PrefixListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::ExceptionsListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> TxUsage::WriteToTopic_Demo_24_Query >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [GOOD] >> TxUsage::WriteToTopic_Demo_16_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:48.973931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:48.974028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:48.974089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:48.974133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:48.974175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:48.974206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:48.974275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:48.974363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:48.975321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:48.975633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:49.112696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:49.112791Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:49.113743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.130254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:49.131131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:49.131339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:49.161316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:49.161801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:49.162687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.163165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:49.168750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.168988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:49.170229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:49.170294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.170462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:49.170514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:49.170560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:49.170768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.178471Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:49.306739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:49.307048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.307293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:49.307355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:49.307622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:49.307691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:49.310594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2026-01-07T17:37:49.310837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:49.311068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.311137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:49.311174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:49.311214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:49.313477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.313551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:49.313597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:49.315786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.315841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.315967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.316040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:49.319831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:49.322141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:49.322298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:49.323208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.323340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } 
Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:49.323382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.323608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:49.323650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.323778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:49.323858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:49.325603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 2 2026-01-07T17:38:33.446002Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:38:33.446033Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:38:33.446081Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:38:33.446108Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-01-07T17:38:33.446133Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-07T17:38:33.449004Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.449156Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.449197Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:38:33.449238Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:38:33.449279Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:38:33.450353Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 
Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.450443Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.450475Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:38:33.450512Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:38:33.450546Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:38:33.451425Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.451535Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.451574Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:38:33.451608Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-07T17:38:33.451647Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:38:33.453100Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.453195Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:38:33.453227Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:38:33.453260Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-07T17:38:33.453298Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-07T17:38:33.453375Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 
2026-01-07T17:38:33.457456Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:38:33.457629Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:38:33.457703Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:38:33.457800Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:38:33.459354Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-07T17:38:33.459402Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:38:33.461259Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:38:33.461380Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.461419Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [13:2732:4720] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:38:33.462764Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:38:33.462807Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:38:33.462897Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:38:33.462925Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:38:33.462983Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:38:33.463010Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:38:33.463069Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:38:33.463116Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-07T17:38:33.463186Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:38:33.463213Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:38:33.465094Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, 
at schemeshard: 72057594046678944 2026-01-07T17:38:33.465281Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.465323Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [13:2735:4723] 2026-01-07T17:38:33.465611Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:38:33.465810Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.465843Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [13:2735:4723] 2026-01-07T17:38:33.465961Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:38:33.466106Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:38:33.466204Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:38:33.466248Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.466277Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [13:2735:4723] 2026-01-07T17:38:33.466412Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.466443Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [13:2735:4723] 2026-01-07T17:38:33.466517Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:38:33.466547Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [13:2735:4723] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TxUsage::WriteToTopic_Demo_16_Query >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLogout >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system |87.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 12257, MsgBus: 61004 2026-01-07T17:34:15.578712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674348295817364:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:15.578745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:34:15.673916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:34:16.151172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:34:16.188000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:34:16.188105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:34:16.352705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:34:16.487390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:34:16.493935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:34:16.501050Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674348295817336:2081] 1767807255572178 != 1767807255572181 2026-01-07T17:34:16.655859Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event 
for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:34:16.656519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:34:16.656526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:34:16.656532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:34:16.656611Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:34:17.270381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:34:17.279134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:34:17.354465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.521661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.775576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:17.848150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.166137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674369770655691:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.166260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.166640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674369770655701:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.166689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:20.533521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.581282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674348295817364:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:34:20.581331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:34:20.598032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.630078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.660236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.762971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.836414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.895680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:20.973145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:34:21.117446Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674374065623872:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.117562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.118134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674374065623877:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.118172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674374065623878:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.118224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:34:21.122802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:34:21.165209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674374065623881:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:34:21.255168Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674374065623932:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... 01-07T17:36:06.106447Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:36:06.118451Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:06.196356Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:06.204610Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592674823985329352:2081] 1767807366105446 != 1767807366105449 2026-01-07T17:36:06.247513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:06.247614Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:06.250030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:06.282573Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:36:06.282599Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:36:06.282607Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:36:06.282682Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:36:06.333356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:06.587402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:06.596408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:06.653739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:36:06.798937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:06.866368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:07.146329Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:09.384665Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674836870233113:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.384759Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.385081Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674836870233122:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.385173Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.450547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.475453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.502840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.528431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.555779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.588492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.643320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.683654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:09.756686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674836870233993:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.756766Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674836870233998:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.756774Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.756920Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592674836870234001:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.756987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:09.760318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:09.771682Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592674836870234000:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:36:09.829701Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592674836870234053:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:11.106505Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592674823985329380:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:11.106574Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:36:11.353372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:21.174912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:36:21.174937Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TWebLoginService::AuditLogLogout [GOOD] >> TSubDomainTest::GenericCases [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table |87.3%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:38:31.475950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:38:31.476055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:31.476096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:38:31.476135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:38:31.476174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:38:31.476205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:38:31.476263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:31.476340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:38:31.477226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:38:31.477560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:38:31.615071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:38:31.615176Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:31.644513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:38:31.645001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:38:31.645223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:38:31.665181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:38:31.665653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:38:31.666402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:38:31.666695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:38:31.675164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:31.675421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:38:31.677058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:31.677143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:31.677298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:38:31.677376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:31.677436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:38:31.677654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.694187Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:38:31.860756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:38:31.861096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.861318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:38:31.861360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:38:31.861579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:38:31.861651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:31.864982Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:38:31.865291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:38:31.865659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.865773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:38:31.865834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:38:31.865869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:38:31.872103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.872196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:38:31.872262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:38:31.877438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.877509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.877592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:38:31.877651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:38:31.881620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:38:31.885604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:38:31.885830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:38:31.886795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-01-07T17:38:31.886946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:38:31.886985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:38:31.887230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:38:31.887271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:38:31.887415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:38:31.887482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:38:31.889873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:31.889927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... done id#101:0 progress is 1/1 2026-01-07T17:38:38.254547Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:38:38.254598Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:38:38.254640Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:38:38.254706Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:38:38.254772Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:38:38.254820Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:38:38.254868Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:38:38.254909Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2026-01-07T17:38:38.254945Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2026-01-07T17:38:38.255276Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:273:2263] Bootstrap 2026-01-07T17:38:38.256588Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:273:2263] Become StateWork (SchemeCache [5:279:2269]) 2026-01-07T17:38:38.257713Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:273:2263] HANDLE TEvClientConnected success 
connect from tablet# 72057594046447617 2026-01-07T17:38:38.262117Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:38:38.262272Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-07T17:38:38.262604Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:38.262656Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:38.262875Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:38.262925Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:38:38.263865Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:38:38.263987Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:38:38.264030Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:38:38.264074Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2026-01-07T17:38:38.264126Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:38:38.264245Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:38:38.264475Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-07T17:38:38.274171Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-07T17:38:38.274676Z node 5 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2026-01-07T17:38:38.276865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:38:38.277009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:38:38.506200Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:38:38.521025Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:38:38.521244Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:38.521294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:38.521530Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-07T17:38:38.521578Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:38.521621Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:38:38.522710Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:38:38.523224Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:38:38.523422Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 218us result status StatusSuccess 2026-01-07T17:38:38.523908Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvYvY7qWBD9vMyIBoF6We\nrKUUwICyXkW0j8DE9TvISXySTvnw5A24wK8hVuyXeaDkTawg9WJW8qoTDdiJULcd\n8OTrZPRluTnHnwmSvmFJBU62aibZr2AtP5al3n7IatK4/qOcHwW80IYjRuo+Zn4A\n+KxD8cPv/OPizvMb9WdsiVepVoVQp5zE3aNowB83fdRWmjP0FbjSyHLd1A1yF6Lt\nBHDiTIc4uo/1T2mL1tY8cNNNMvhSsRs1Bz45ZW63pO02Ov6EhmjkSzWdaOBmVJNP\nM13JaeFIi/kC8KPfMFrbp2cK/lx5WuOn1m1aZK5M6A2qVn4v55YQJdhDYzdjPaqo\nGQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767893918500 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:38:38.524481Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-07T17:38:38.524533Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: No ydb_session_id cookie 2026-01-07T17:38:38.524890Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-07T17:38:38.525794Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (589A015B): Token is not in correct format 2026-01-07T17:38:38.525863Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format 2026-01-07T17:38:38.526265Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2026-01-07T17:38:38.206316Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:38:38.254191Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-07T17:38:38.509955Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3ODUwNzE4LCJpYXQiOjE3Njc4MDc1MTgsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2026-01-07T17:38:38.527423Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3ODUwNzE4LCJpYXQiOjE3Njc4MDc1MTgsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2026-01-07T17:38:38.527423Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3ODUwNzE4LCJpYXQiOjE3Njc4MDc1MTgsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> DstCreator::Basic >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> DstCreator::NonExistentSrc ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-07T17:38:22.071311Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675409692585708:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:22.075790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:22.141348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:22.497052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:22.531072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:22.531197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:22.559457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:22.658688Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:22.666581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:38:22.992834Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675409692585917:2139] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:22.992889Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675409692586391:2454] HANDLE EvNavigateScheme dc-1 2026-01-07T17:38:22.993042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675409692585925:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:22.993155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675409692586164:2295][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675409692585925:2142], cookie# 1 2026-01-07T17:38:22.994739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675409692586195:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675409692586192:2295], cookie# 1 2026-01-07T17:38:22.994778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675409692586196:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675409692586193:2295], cookie# 1 2026-01-07T17:38:22.994808Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675409692586197:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7592675409692586194:2295], cookie# 1 2026-01-07T17:38:22.994844Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675405397618274:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675409692586195:2295], cookie# 1 2026-01-07T17:38:22.994872Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675405397618277:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675409692586196:2295], cookie# 1 2026-01-07T17:38:22.994887Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675405397618280:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675409692586197:2295], cookie# 1 2026-01-07T17:38:22.994936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675409692586195:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675405397618274:2051], cookie# 1 2026-01-07T17:38:22.994957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675409692586196:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675405397618277:2054], cookie# 1 2026-01-07T17:38:22.994971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675409692586197:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675405397618280:2057], cookie# 1 2026-01-07T17:38:22.995011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675409692586164:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675409692586192:2295], cookie# 1 2026-01-07T17:38:22.995049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675409692586164:2295][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:22.995070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675409692586164:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675409692586193:2295], cookie# 1 2026-01-07T17:38:22.995095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592675409692586164:2295][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:22.995117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675409692586164:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675409692586194:2295], cookie# 1 2026-01-07T17:38:22.995133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675409692586164:2295][/dc-1] Sync cookie mismatch: sender# [1:7592675409692586194:2295], cookie# 1, current cookie# 0 2026-01-07T17:38:22.995186Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592675409692585925:2142], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:23.016349Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675409692585925:2142], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: 
[1:7592675409692586164:2295] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:23.016516Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675409692585925:2142], cacheItem# { Subscriber: { Subscriber: [1:7592675409692586164:2295] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:23.019295Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675413987553688:2455], recipient# [1:7592675409692586391:2454], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:23.019384Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675409692586391:2454] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:23.077670Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675409692585925:2142], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:23.077743Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592675409692585925:2142], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-07T17:38:23.077957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592675413987553692:2456][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:38:23.078702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592675405397618274:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7592675413987553696:2456] 2026-01-07T17:38:23.078719Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592675405397618274:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 
2026-01-07T17:38:23.078803Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592675405397618274:2051] Subscribe: subscriber# [1:7592675413987553696:2456], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:38:23.078841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592675405397618277:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7592675413987553697:2456] 2026-01-07T17:38:23.078848Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592675405397618277:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-07T17:38:23.078864Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592675405397618277:2054] Subscribe: subscriber# [1:7592675413987553697:2456], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:38:23.078884Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592675405397618280:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7592675413987553698:2456] 2026-01-07T17:38:23.078905Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592675405397618280:2057] Upsert description: path# /dc-1/.metadata/initialization/migrat ... 480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:35.462334Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675465970108250:3046], recipient# [4:7592675465970108249:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.101347Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.101502Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675465970108241:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:36.101601Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675470265075561:3054], recipient# [4:7592675470265075560:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.442534Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.442690Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675448790238434:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:36.442784Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675470265075563:3055], recipient# [4:7592675470265075562:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.463205Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:36.463369Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675448790238434:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:36.463490Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675470265075565:3056], recipient# [4:7592675470265075564:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.189139Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.189349Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675465970108241:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:37.189478Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675474560042872:3060], recipient# [4:7592675474560042871:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.448662Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.448809Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675448790238434:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:37.448893Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675474560042874:3061], recipient# [4:7592675474560042873:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.465820Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592675444495270478:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:37.465967Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592675444495270478:2118], cacheItem# { Subscriber: { Subscriber: [4:7592675448790238434:2570] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:38:37.466055Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592675474560042876:3062], recipient# [4:7592675474560042875:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |87.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.3%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TKesusTest::TestSessionDetach >> DstCreator::ExistingDst >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |87.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous >> TPQTest::TestSourceIdDropByUserWrites >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin >> TxUsage::WriteToTopic_Demo_11_Query [GOOD] >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> 
SlowTopicAutopartitioning::CDC_Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2026-01-07T17:38:41.014200Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:38:41.014307Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:38:41.033898Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:38:41.034870Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:38:41.069961Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:38:41.070511Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=10052654130355965355, session=0, seqNo=0) 2026-01-07T17:38:41.070675Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:38:41.083008Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=10052654130355965355, session=1) 2026-01-07T17:38:41.084636Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=1792310757567881760, session=2) 2026-01-07T17:38:41.084760Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=1792310757567881760) 2026-01-07T17:38:41.085391Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:136:2161], cookie=4878004153098276680 2026-01-07T17:38:41.086667Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=3152234598183372814, session=1, seqNo=0) 2026-01-07T17:38:41.105166Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=3152234598183372814, session=1) 2026-01-07T17:38:41.105601Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:38:41.105770Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:38:41.105865Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:38:41.106056Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=9214283207968098823, session=1) 2026-01-07T17:38:41.116533Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:38:41.116614Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:38:41.116684Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-07T17:38:41.130338Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-07T17:38:41.130434Z node 1 
:KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=9214283207968098823) 2026-01-07T17:38:41.130505Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:38:41.667001Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:38:41.667124Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:38:41.689274Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:38:41.689613Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:38:41.706292Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:38:41.706708Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:137:2161], cookie=1480010692228362010, path="") 2026-01-07T17:38:41.732624Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:137:2161], cookie=1480010692228362010, status=SUCCESS) 2026-01-07T17:38:41.733444Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:146:2168], cookie=111, session=0, seqNo=0) 2026-01-07T17:38:41.733625Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:38:41.733837Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:146:2168], cookie=486521833560152774, session=1) 2026-01-07T17:38:41.746750Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:38:41.746850Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:38:41.762876Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:146:2168], cookie=111, session=1) 2026-01-07T17:38:41.762981Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:146:2168], cookie=486521833560152774) 2026-01-07T17:38:41.763036Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:38:42.152557Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:38:42.152686Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:38:42.182294Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:38:42.182509Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:38:42.223840Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:38:42.224408Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=9466234943379799374, session=0, seqNo=0) 2026-01-07T17:38:42.224576Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:38:42.240872Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], 
cookie=9466234943379799374, session=1) 2026-01-07T17:38:42.241740Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=7549873049007201935, session=1) 2026-01-07T17:38:42.241855Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:38:42.257707Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=7549873049007201935) 2026-01-07T17:38:42.258705Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=15374664418741790311) 2026-01-07T17:38:42.258798Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=15374664418741790311) 2026-01-07T17:38:42.261533Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:157:2179], cookie=16642203591001816565, session=0, seqNo=0) 2026-01-07T17:38:42.261698Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:38:42.274191Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:157:2179], cookie=16642203591001816565, session=2) 2026-01-07T17:38:42.276233Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=8840104311914241495, session=2) 2026-01-07T17:38:42.276390Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2026-01-07T17:38:42.290304Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=8840104311914241495) 2026-01-07T17:38:42.915027Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:38:42.915129Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:38:42.934174Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:38:42.934331Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:38:42.975155Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:38:42.976469Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-07T17:38:42.976663Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:38:42.991916Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=12345, session=1) 2026-01-07T17:38:42.993305Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-07T17:38:43.012557Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=23456, session=1) 2026-01-07T17:38:43.423234Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:38:43.423346Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] 
TTxInitSchema::Execute 2026-01-07T17:38:43.443540Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:38:43.443676Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:38:43.479353Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:38:43.480244Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-07T17:38:43.480406Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:38:43.493084Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12345, session=1) 2026-01-07T17:38:43.493843Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-07T17:38:43.506585Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:144:2166], cookie=23456, session=1) |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> TxUsage::WriteToTopic_Demo_45_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:36:52.474713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:52.585888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:52.602631Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:52.603132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:52.603222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:52.888273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:52.888411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:52.982861Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807409682676 != 1767807409682680 2026-01-07T17:36:53.007200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:53.056461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:53.154742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:53.522026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:53.537387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:53.673906Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:36:53.674001Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:36:53.674193Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:36:53.839495Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:36:53.839598Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:36:53.840280Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:36:53.840388Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:36:53.840766Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:36:53.840903Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:36:53.841101Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:36:53.841402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:36:53.843197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:53.844384Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:36:53.844449Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:36:53.892028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:53.893361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:53.893698Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:36:53.893973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:53.948566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:53.949397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:53.949518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:53.951301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:53.951387Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:53.951473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:53.951876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:53.952028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:53.952119Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:36:53.952655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:54.027581Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:54.027811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:54.027933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:36:54.027980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:54.028017Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:54.028050Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:54.028354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:54.028415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:54.028785Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:54.028976Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:54.029080Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:54.029131Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:54.029173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:54.029212Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:54.029243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:54.029278Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:54.029321Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:54.029475Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:54.029526Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:54.029585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:36:54.030048Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:36:54.030100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:54.030247Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:54.030498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:54.030557Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:54.030650Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... 075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-07T17:38:41.800307Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2026-01-07T17:38:41.800348Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:38:41.800442Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:886:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:38:41.800476Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:38:41.800507Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-07T17:38:41.800553Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:38:41.800788Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1175:2962], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1880 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2026-01-07T17:38:41.801706Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:971:2825], Recipient [13:886:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:38:41.801753Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:38:41.801794Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 
72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2026-01-07T17:38:41.801865Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:38:41.801997Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1175:2962], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:38:41.802524Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:38:41.802882Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1315 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2026-01-07T17:38:41.803641Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:38:41.814357Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:38:41.814612Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:886:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-07T17:38:41.814714Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:38:41.814797Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2026-01-07T17:38:41.857428Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:38:41.857729Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:971:2825], Recipient [13:886:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-07T17:38:41.857779Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:38:41.857824Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-07T17:38:42.114465Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:38:42.114584Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715667 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2026-01-07T17:38:42.120045Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1284:3070], Recipient [13:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:38:42.120252Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:38:42.120357Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v5000/18446744073709551615 ImmediateWriteEdge# v5001/0 ImmediateWriteEdgeReplied# v5001/0 2026-01-07T17:38:42.120441Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5001/18446744073709551615 2026-01-07T17:38:42.120559Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-07T17:38:42.120729Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:38:42.120803Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:38:42.120877Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:38:42.120981Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:38:42.121055Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:38:42.121130Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:38:42.121166Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:38:42.121197Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:38:42.121226Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:38:42.121408Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:38:42.121888Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1284:3070], 0} after executionsCount# 1 2026-01-07T17:38:42.121986Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1284:3070], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:38:42.122119Z node 13 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1284:3070], 0} finished in read 2026-01-07T17:38:42.122261Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:38:42.122311Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:38:42.122361Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:38:42.122392Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:38:42.122450Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:38:42.122477Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:38:42.122517Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:38:42.122599Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:38:42.122789Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:38:42.124908Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1284:3070], Recipient [13:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:38:42.125031Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |87.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2026-01-07T17:36:29.402867Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674921779002933:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:29.403671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:36:29.444571Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:36:29.633137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:36:29.664831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:29.665010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:29.752594Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:29.761554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:29.867002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:36:29.899600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0031ab/r3tmp/yandexRker39.tmp 2026-01-07T17:36:29.899634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0031ab/r3tmp/yandexRker39.tmp 2026-01-07T17:36:29.899816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0031ab/r3tmp/yandexRker39.tmp 2026-01-07T17:36:29.899931Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:36:29.935737Z INFO: TTestServer started on Port 1305 GrpcPort 2091 PQClient connected to localhost:2091 2026-01-07T17:36:30.167868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:30.176785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:36:30.256021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:30.410560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:32.926991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674934663905731:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:32.927140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:32.929031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674934663905744:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:32.929101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592674934663905745:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:32.929324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:32.933764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:32.946662Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592674934663905748:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:36:33.017103Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592674938958873108:2646] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:33.333775Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592674938958873116:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:36:33.334230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:33.336621Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmM2YjkyNjQtMzRiMjNiYjYtOTA4MmYwYzktYzJlZWE2OGE=, ActorId: [1:7592674934663905721:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrg74hb5z29gdevjdr5x1p, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:36:33.339119Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:36:33.380256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:33.499899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7592674938958873398:2822] 2026-01-07T17:36:34.400630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592674921779002933:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:36:34.400716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-07T17:36:39.910675Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2026-01-07T17:36:40.185279Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][] pipe [1:7592674969023644791:2985] connected; active server actors: 1 2026-01-07T17:36:40.186499Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T17:36:40.197348Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037894][streamImpl] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037894 2026-01-07T17:36:40.197513Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037894][streamImpl] BALANCER INIT DONE for streamImpl: (0, 72075186224037893) 2026-01-07T17:36:40.208692Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037894][streamImpl] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 1 2026-01-07T17:36:40.246014Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037892:1][1:7592674969023644863:2479] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:51:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:36:40.264236Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added partitions [] >>>>> 2026-01-07T17:36:40.273104Z Start table insert 2026-01-07T17:36:41.343646Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186224037894][streamImpl] TPersQueueReadBalancer::HandleWakeup 2026-01-07T17:36:41.343715 ... BUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 46 2026-01-07T17:38:42.366247Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.366255Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.366276Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NEED_SPLIT 2026-01-07T17:38:42.366285Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 44 2026-01-07T17:38:42.366302Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.366311Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.366322Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2026-01-07T17:38:42.366331Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 45 2026-01-07T17:38:42.366348Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.366357Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.366377Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366389Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366407Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366425Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366435Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366453Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366463Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366480Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366534Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366552Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366562Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2026-01-07T17:38:42.366579Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.366590Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367118Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367141Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367166Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367178Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367196Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367207Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2026-01-07T17:38:42.367216Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 50 2026-01-07T17:38:42.367239Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.367246Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.367310Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367463Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2026-01-07T17:38:42.367476Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 52 2026-01-07T17:38:42.367496Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.367505Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.367530Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NEED_SPLIT 2026-01-07T17:38:42.367538Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 53 2026-01-07T17:38:42.367553Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.367560Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.367570Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2026-01-07T17:38:42.367580Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 54 2026-01-07T17:38:42.367596Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=57, #merges=0 2026-01-07T17:38:42.367604Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2026-01-07T17:38:42.367620Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367631Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367647Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367734Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367828Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367839Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367854Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367865Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367881Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367891Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367906Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2026-01-07T17:38:42.367918Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367935Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367946Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.367964Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.368050Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-01-07T17:38:42.369830Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186224037894][streamImpl] Send TEvPeriodicTopicStats PathId: 51 Generation: 1 StatsReportRound: 121 DataSize: 15173955 UsedReserveSize: 0 2026-01-07T17:38:42.369859Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186224037894][streamImpl] ProcessPendingStats. PendingUpdates size 0 >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin >> DstCreator::CannotFindColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddIndex >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> DstCreator::EmptyReplicationConfig [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> TxUsage::WriteToTopic_Demo_27_Table ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2026-01-07T17:38:39.977556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675483092232765:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:39.977629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:40.491115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:40.503282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:40.503413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:40.542314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:40.659939Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:40.669013Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675483092232641:2081] 1767807519943140 != 1767807519943143 2026-01-07T17:38:40.776638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:40.889592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:40.889621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:40.889628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:40.889720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:41.013718Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:41.182319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:41.190338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:41.194349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:41.325766Z node 1 :REPLICATION_CONTROLLER TRACE: 
dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:41.325795Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:41.326787Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:43.524353Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807521291, tx_id: 281474976710658 } } } 2026-01-07T17:38:43.524808Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:38:43.526521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:43.527383Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:38:43.527398Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:38:43.562399Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:38:43.562434Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:38:44.514953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:44.515044Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:44.661090Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675502386808379:2081] 1767807524360637 != 1767807524360640 2026-01-07T17:38:44.682241Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:44.711760Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:44.711878Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:44.744967Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:44.813173Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:45.096176Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:45.096205Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:45.096211Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:45.096831Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:45.410530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:45.417721Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:45.418535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:45.421707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:45.466943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:45.518107Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:45.518132Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:45.518608Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:48.205901Z node 2 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807525498, tx_id: 281474976710658 } } } 2026-01-07T17:38:48.206242Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:38:48.271754Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:38:48.275812Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767807525547 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 
83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:38:48.276184Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2026-01-07T17:38:40.568358Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675485565247889:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:40.568439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:40.820367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:40.820471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:40.868587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:40.929405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:40.931920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675485565247858:2081] 1767807520564096 != 1767807520564099 2026-01-07T17:38:40.942565Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:41.181781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:41.231370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:41.231404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:41.231417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:41.231542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:41.532156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:41.554479Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:41.554520Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:41.555357Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:41.580560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:44.135034Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: 
[DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-07T17:38:44.135101Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2026-01-07T17:38:45.156835Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:45.156985Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:45.279353Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:45.279429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:45.293918Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675508230272519:2081] 1767807525055483 != 1767807525055486 2026-01-07T17:38:45.304556Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:45.306078Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:45.387875Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:45.613737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:45.613759Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:45.613767Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:45.613841Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:45.869050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:45.873727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:45.877940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:45.991828Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:46.061357Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:46.061386Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:46.065357Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:46.082871Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:48.816172Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807525967, tx_id: 281474976710658 } } } 2026-01-07T17:38:48.816583Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:38:48.865434Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:38:48.869203Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767807526079 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 
SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 
MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:38:48.869548Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2026-01-07T17:38:41.105196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675490241468207:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:41.105743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:41.441015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:41.441116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:41.452524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:41.474634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:41.492992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:41.496643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675490241468099:2081] 1767807521093569 != 1767807521093572 2026-01-07T17:38:41.671654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:41.889575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:41.889600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:41.889606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:41.889690Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:42.121147Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:42.212570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:42.218158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:42.221729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:42.402909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:42.453424Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:42.453452Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:42.454025Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:44.683563Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807522362, tx_id: 281474976710658 } } } 2026-01-07T17:38:44.685642Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:38:44.728214Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:38:44.732056Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767807522481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:38:44.733533Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:38:45.497105Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675509016348048:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:45.497248Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:45.546022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:45.670798Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:45.677048Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675509016348025:2081] 1767807525471192 != 1767807525471195 2026-01-07T17:38:45.686225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:45.686343Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:45.686549Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:45.746204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:45.842207Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:45.949574Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:45.949603Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:45.949610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-07T17:38:45.949701Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:46.237505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:46.246445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:46.283179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:46.319799Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:38:46.319824Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:38:46.320277Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:38:46.524753Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:48.994495Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807526317, tx_id: 281474976710658 } } } 2026-01-07T17:38:48.994890Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:38:48.996736Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:38:48.997898Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" 
PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767807526359 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 
0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:38:48.998144Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_16_Query [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> TSentinelTests::Smoke >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> 
TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardHalfPile >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] Test command err: Trying to start YDB, gRPC: 8655, MsgBus: 2421 2026-01-07T17:38:29.805703Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675440171694701:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:29.805776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:29.880015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:30.478273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:30.478391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:30.545441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:30.589107Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675440171694475:2081] 1767807509761617 != 1767807509761620 2026-01-07T17:38:30.631753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:30.632742Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:30.868817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:30.880480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:30.941697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:30.941726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:30.941740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:30.941803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:31.719275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:38:31.719615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:38:31.721402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:31.722092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:38:31.724553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807511771, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:38:31.725742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:38:31.725812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-07T17:38:31.726023Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592675444466662309:2253] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:38:31.726490Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675440171694443:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:31.726589Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675440171694446:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:31.726659Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675440171694449:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:31.726903Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675444466662258:2225][/Root] Path was updated to new version: owner# [1:7592675440171694769:2121], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.727237Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592675444466662309:2253] Ack update: ack to# [1:7592675440171694823:2140], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:38:31.727467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-07T17:38:31.728269Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675444466662379:2289][/Root] Path was updated to new version: owner# [1:7592675444466662367:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.728521Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675444466662376:2288][/Root] Path was updated to new version: owner# [1:7592675444466662366:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.737334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27720, MsgBus: 8522 2026-01-07T17:38:35.296710Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675466444101318:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:35.297070Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:35.361596Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:35.467634Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:35.470350Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675466444101272:2081] 1767807515284342 != 1767807515284345 2026-01-07T17:38:35.470478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:35.470566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:35.483655Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:35.550861Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:35.621730Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:35.621760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:35.621770Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:35.621853Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:36.098240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:38:36.098556Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:38:36.098595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:36.099249Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:38:36.105073Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807516146, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:38:36.106467Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:38:36.106536Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-07T17:38:36.106773Z node 2 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [2:7592675466444101806:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:38:36.107233Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7592675466444101240:2049] Updat ... 
: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:45.249490Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:45.249522Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:45.249533Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:45.249632Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:45.827967Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:45.841554Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:45.851670Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:45.888019Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:45.968378Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:46.158413Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:46.245943Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:48.986817Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592675519817476069:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:48.986969Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:48.987315Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592675519817476078:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:48.987370Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.349927Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.388690Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.428626Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.471197Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.504790Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.550617Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.593343Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.644514Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:49.743666Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592675524112444241:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.743811Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.744167Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592675524112444247:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.744210Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592675524112444246:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.744336Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:49.749345Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:38:49.763243Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592675524112444250:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:38:49.794920Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592675502637605064:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:49.795015Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:38:49.818075Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592675524112444301:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:38:51.762548Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:38:51.776636Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-07T17:38:51.874099Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592675532702379278:2543], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2026-01-07T17:38:51.875869Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YWJjMmE4OWEtNDNhODE0NTctZTZiZjlhOWMtYWY0NThhMTE=, ActorId: [4:7592675532702379275:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrmes3ef1haht48vfefbqm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 17 } message: "Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn\'t exist, please contact internal support" end_position { row: 2 column: 17 } severity: 1 } } tx_id# trace_id# |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpScan::ScanRetryReadRanges [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:38:33.671930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:33.812988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:33.823834Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:33.827852Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:33.828404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:33.828658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:33.838433Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:33.838920Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:33.839109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:33.839304Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:34.649471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:34.797836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:34.797981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:34.798773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:34.798859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:34.851209Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:38:34.852047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:34.852390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:34.986765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:35.111334Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:38:36.036084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:36.123267Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:36.123425Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:36.515896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:37.491551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1735:3115], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:37.491701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1746:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:37.491811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:37.497255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1751:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:37.497638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:37.507620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:38:38.087545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1749:3123], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:38:38.248090Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1845:3191] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 2 -- EvScan [1:1924:3244] -> [2:1879:2439] -- EvScanData from [2:1926:2446]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2026-01-07T17:38:41.192415Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down ... waiting for SysViewsRoster update finished 2026-01-07T17:38:48.961619Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:48.962659Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:48.965772Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:38:48.977212Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:48.978657Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:48.980425Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:48.980990Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:48.981224Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:48.983259Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:48.983311Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:49.336493Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:49.437108Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:49.437277Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:49.438223Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:49.438310Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:49.487241Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:38:49.487847Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:49.488331Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:49.575739Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:49.601925Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:38:50.274715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:50.331713Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:50.331823Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:50.684319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:51.287060Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1722:3108], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:51.287174Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1732:3113], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:51.287282Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:51.288518Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1737:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:51.288689Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:51.294309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:38:51.814686Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1736:3116], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:38:51.932602Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1832:3184] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 4 -- EvScan [3:1912:3238] -> [4:1866:2433] -- EvScanData from [4:1915:2440]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976715661},"Finished":true} 2026-01-07T17:38:53.418308Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> RetryPolicy::RetryWithBatching [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin >> BasicUsage::WriteSessionNoAvailableDatabase >> TxUsage::WriteToTopic_Demo_45_Query >> TSentinelTests::PDiskPileGuardHalfPile [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> BasicUsage::FallbackToSingleDb >> TPopulatorTest::RemoveDir |87.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TSentinelTests::PDiskUnknownState [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-07T17:38:57.246125Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-07T17:38:57.251330Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-07T17:38:57.251418Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-07T17:38:57.253413Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-07T17:38:57.253466Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-07T17:38:57.253511Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-07T17:38:57.253584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-07T17:38:57.253604Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-07T17:38:57.253621Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-07T17:38:57.253680Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-07T17:38:57.253700Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-07T17:38:57.253716Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-07T17:38:57.253745Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-07T17:38:57.253764Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-07T17:38:57.253797Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-01-07T17:38:57.253837Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-07T17:38:57.253866Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-07T17:38:57.253885Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-07T17:38:57.253927Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-07T17:38:57.253951Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-07T17:38:57.253969Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-07T17:38:57.254046Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:38:57.254125Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.254290Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-07T17:38:57.254354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.254447Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:38:57.254502Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-07T17:38:57.254640Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.254726Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-07T17:38:57.254767Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.254839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:38:57.254901Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-07T17:38:57.254968Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.255048Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-07T17:38:57.255082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.255135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-07T17:38:57.255174Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-07T17:38:57.255233Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.255320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-07T17:38:57.255358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.255420Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-07T17:38:57.255470Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-07T17:38:57.255539Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.255606Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-07T17:38:57.255646Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.255700Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-07T17:38:57.255814Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-07T17:38:57.255882Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-07T17:38:57.255933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:57.255984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:57.256061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:38:57.256103Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.256144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-07T17:38:57.256177Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-07T17:38:57.256212Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-07T17:38:57.256238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:57.256303Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:38:57.256352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.256390Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-07T17:38:57.256424Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-07T17:38:57.256465Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:57.256515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:38:57.256545Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.256574Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-07T17:38:57.256587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 2026-01-07T17:38:57.256601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:57.256676Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-07T17:38:57.256725Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.256770Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-07T17:38:57.256787Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-07T17:38:57.256816Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:57.256887Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-07T17:38:57.256940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.256988Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2026-01-07T17:38:57.257018Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-07T17:38:57.257035Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:57.257084Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-07T17:38:57.257155Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:57.257225Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-07T17:38:57.257254Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-01-07T17:38:57.257282Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-07T17:38:57.257469Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-07T17:38:57.267891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-07T17:38:57.267974Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskUnknownState [GOOD] Test command err: 2026-01-07T17:38:53.861378Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:38:53.861433Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:38:53.861486Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:38:53.861515Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:38:53.861556Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:38:53.861616Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:38:53.862736Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" 
State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:38:53.869829Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
00 TotalSize: 214748364800 State: Reserved15 } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.935673Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-07T17:38:55.935799Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.935932Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.935997Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:38:55.936394Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:40, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-07T17:38:55.936464Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-07T17:38:55.936715Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-07T17:38:55.936760Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:40 2026-01-07T17:38:55.936845Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:38:55.936888Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:38:55.937022Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:38:55.937092Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-07T17:38:55.937121Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-07T17:38:55.937149Z node 9 :CMS DEBUG: sentinel.cpp:786: 
[Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-07T17:38:55.937174Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:38:55.937199Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:38:55.937221Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-07T17:38:55.937243Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-07T17:38:55.937483Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938172Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938210Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-01-07T17:38:55.938386Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938541Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: 
"/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938657Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938794Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.938937Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.939070Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved17 } PDiskStateInfo { PDiskId: 58 
CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved15 } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:38:55.939095Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 17 2026-01-07T17:38:55.939115Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-07T17:38:55.939163Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:38:55.939508Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-07T17:38:55.939568Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-07T17:38:55.939814Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-07T17:38:55.939863Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: 2026-01-07T17:35:39.994443Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674707378737952:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:39.994711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:40.018802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:40.465029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:40.479158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:40.479253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:40.604158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:40.669147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674707378737852:2081] 1767807339982274 != 1767807339982277 2026-01-07T17:35:40.677760Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:40.736131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
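The Sentinel trace above shows the pattern under test: a whiteboard report carrying a state code the StateUpdater does not recognize (Reserved15/16/17) is logged at CRIT as "Unknown pdisk state", and once the per-disk state counter reaches its threshold the Main actor requests an ACTIVE -> INACTIVE status change via the BS controller. The snippet below is a minimal standalone sketch of that classify-and-deactivate pattern only; all names in it (EHealth, TDiskGuard, ClassifyState) are hypothetical and it is not the actual sentinel.cpp code.

```cpp
// Illustrative sketch of the behavior visible in the Sentinel log above:
// unrecognized whiteboard states are treated as Unknown, and once the
// unknown-state counter reaches a threshold the disk is deactivated.
#include <cstdint>
#include <iostream>

enum class EHealth { Normal, Unknown };

// Map a raw whiteboard state code to a coarse health class.
// Codes such as 15..17 ("Reserved15".."Reserved17" in the log) are unknown.
EHealth ClassifyState(std::uint32_t rawState) {
    switch (rawState) {
        case 0: // Normal
            return EHealth::Normal;
        default:
            return EHealth::Unknown;
    }
}

struct TDiskGuard {
    std::uint32_t unknownCounter = 0;
    bool active = true;

    // Returns true exactly when the guard decides to deactivate the disk.
    bool OnStateReport(std::uint32_t rawState, std::uint32_t threshold = 1) {
        if (ClassifyState(rawState) == EHealth::Unknown) {
            ++unknownCounter;
            if (active && unknownCounter >= threshold) {
                active = false; // request ACTIVE -> INACTIVE, as in the log
                return true;
            }
        } else {
            unknownCounter = 0; // healthy report resets the counter
        }
        return false;
    }
};

int main() {
    TDiskGuard guard;
    std::cout << std::boolalpha;
    // A "Reserved16"-style report trips the guard at threshold 1,
    // mirroring "StateCounter# 1" in the trace.
    std::cout << guard.OnStateReport(16) << "\n"; // true
    std::cout << guard.OnStateReport(0)  << "\n"; // false
}
```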
2026-01-07T17:35:41.038145Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:41.073546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:41.073568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:41.073575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:41.073663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:41.144232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:41.151560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:35:41.169862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:41.202105Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7592674715968673289:2297] 2026-01-07T17:35:41.202434Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:41.241321Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:41.241435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:41.243119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:41.243158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:41.243199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:41.243621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:41.243675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:41.243705Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7592674715968673304:2297] in generation 1 2026-01-07T17:35:41.248572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:41.371200Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:41.371356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:41.371409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7592674715968673306:2298] 2026-01-07T17:35:41.371419Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:41.371428Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:41.371440Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.371623Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:41.371714Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:41.371750Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:41.371772Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:41.371790Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:41.371806Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:41.376588Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592674715968673287:2503], serverId# [1:7592674715968673291:2504], sessionId# [0:0:0] 2026-01-07T17:35:41.376738Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:41.377023Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:41.377139Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T17:35:41.385409Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:41.392882Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:41.392993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:41.401178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592674715968673320:2520], serverId# [1:7592674715968673321:2521], sessionId# [0:0:0] 2026-01-07T17:35:41.410876Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767807341447 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807341447 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:41.410940Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.411161Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:41.411247Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:41.411264Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:41.411291Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807341447:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:41.411635Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767807341447:281474976710657 keys extracted: 0 2026-01-07T17:35:41.411795Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:41.411897Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:41.411928Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:41.421653Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:41.422290Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:41.423629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:35:41.423654Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:41.424179Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767807341447} 2026-01-07T17:35:41.424219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:41.424257Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:41.424271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:41.424283Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:41.424326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807341447 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7592674711673705494:2144], exec latency: 10 ms, propose latency: 12 ms 2026-01-07T17:35:41.424350Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 st ... 
: 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:38:54.334788Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-07T17:38:54.334961Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:38:54.340062Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2026-01-07T17:38:54.340221Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2026-01-07T17:38:54.362412Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 4500} 2026-01-07T17:38:54.362512Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:38:54.362552Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:38:54.362635Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:38:54.362693Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-07T17:38:54.362773Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:38:54.366528Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 2026-01-07T17:38:54.366615Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:38:54.413616Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.413692Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.413738Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.413773Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.413817Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.455831Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.455910Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.455947Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.455985Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.456024Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.487364Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.487467Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.487501Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.487544Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.487576Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.518976Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.519041Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.519068Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.519098Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.519124Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.550760Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.550828Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.550856Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.550895Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.550919Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.738127Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:8] at 72075186224037888 2026-01-07T17:38:54.738324Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2026-01-07T17:38:54.738446Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-07T17:38:54.738750Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:38:54.739023Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:38:54.739125Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:38:54.739484Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [25:1402:3073], Table: `/Root/Table` ([72057594046644480:38:3]), SessionActorId: [25:1346:3073]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1402:3073].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:38:54.739757Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [25:1396:3073], SessionActorId: [25:1346:3073], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1346:3073]. 2026-01-07T17:38:54.740268Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=25&id=NWY3Y2M1OTktNDUxMGM3NzgtZTI1NGNjNS04OTlkMDFlMA==, ActorId: [25:1346:3073], ActorState: ExecuteState, LegacyTraceId: 01kecrmhh9382mkfmgv63hvvbn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1483:3073] from: [25:1396:3073] trace_id# 2026-01-07T17:38:54.740509Z node 25 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [25:1483:3073] TxId: 281474976715665. Ctx: { TraceId: 01kecrmhh9382mkfmgv63hvvbn, Database: , SessionId: ydb://session/3?node_id=25&id=NWY3Y2M1OTktNDUxMGM3NzgtZTI1NGNjNS04OTlkMDFlMA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:38:54.741168Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=25&id=NWY3Y2M1OTktNDUxMGM3NzgtZTI1NGNjNS04OTlkMDFlMA==, ActorId: [25:1346:3073], ActorState: ExecuteState, LegacyTraceId: 01kecrmhh9382mkfmgv63hvvbn, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:38:54.742492Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:9] at 72075186224037888 2026-01-07T17:38:54.742579Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:9] at 72075186224037888 2026-01-07T17:38:54.742841Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-07T17:38:54.747466Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:38:54.747603Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:38:54.748597Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:38:54.749239Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:38:54.749343Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-07T17:38:54.749520Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:04.000000Z 2026-01-07T17:38:54.749707Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-07T17:38:54.749893Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:38:54.750072Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:38:54.750389Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2026-01-07T17:38:54.751177Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPopulatorQuorumTest::OneRingGroup >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2026-01-07T17:38:57.549852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:38:57.549917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: 
minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 
State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 
FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
46678944, LocalPathId: 1] Version: 8 }: sender# [1:18:2065], cookie# 101 2026-01-07T17:38:57.939320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:97:2125], cookie# 101 2026-01-07T17:38:57.939369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 101 2026-01-07T17:38:57.939413Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 101 2026-01-07T17:38:57.939465Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 101 2026-01-07T17:38:57.939824Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:98:2126], cookie# 101 2026-01-07T17:38:57.939864Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 8 2026-01-07T17:38:57.940176Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:99:2127], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-07T17:38:57.940868Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 101 2026-01-07T17:38:57.940995Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 101 2026-01-07T17:38:57.941031Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-07T17:38:57.941337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 101 2026-01-07T17:38:57.941372Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:38:57.942517Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { 
Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 447, preserialized size# 65 2026-01-07T17:38:57.942576Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 9 2026-01-07T17:38:57.942719Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.942766Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.942806Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.943060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000038, drop txId: 101" Path: "/Root/DirB" PathId: 38 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 307, preserialized size# 0 2026-01-07T17:38:57.943092Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: 
[1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 101, is deletion# true, version: 0 2026-01-07T17:38:57.943174Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:12:2059], cookie# 101 2026-01-07T17:38:57.943228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:15:2062], cookie# 101 2026-01-07T17:38:57.943276Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:18:2065], cookie# 101 2026-01-07T17:38:57.943330Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.943365Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.943529Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:97:2125], cookie# 101 2026-01-07T17:38:57.943564Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-07T17:38:57.943715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:98:2126], cookie# 101 2026-01-07T17:38:57.943749Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 9 2026-01-07T17:38:57.943803Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2026-01-07T17:38:57.943848Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2026-01-07T17:38:57.943890Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2026-01-07T17:38:57.943936Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:99:2127], cookie# 101 
2026-01-07T17:38:57.944206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:97:2125], cookie# 101 2026-01-07T17:38:57.944351Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:98:2126], cookie# 101 2026-01-07T17:38:57.944398Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 18446744073709551615 2026-01-07T17:38:57.944639Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:99:2127], cookie# 101 2026-01-07T17:38:57.944676Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-07T17:32:29.638245Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.638303Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.638356Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:32:29.642314Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:32:29.642378Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.642411Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.643776Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008773s 2026-01-07T17:32:29.644392Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:32:29.644428Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.644451Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.644502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009373s 2026-01-07T17:32:29.644921Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-07T17:32:29.644964Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.645010Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:32:29.645065Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006703s 2026-01-07T17:32:29.661019Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1767807149660978 2026-01-07T17:32:30.217603Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592673895960417267:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:30.219946Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005869s 2026-01-07T17:32:30.218731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:30.298281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:30.325023Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:30.349895Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592673896349023646:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:30.349947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:32:30.392150Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:32:30.392343Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:32:30.773048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.844867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.937987Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:32:30.973362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:30.973443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:32:31.001165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:32:31.001236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:32:31.014923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:31.029068Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:32:31.064357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:32:31.189897Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:32:31.221079Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:31.246079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:31.372319Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:32:31.517866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:32:31.661601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038b1/r3tmp/yandexUTzLZF.tmp 2026-01-07T17:32:31.661627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038b1/r3tmp/yandexUTzLZF.tmp 2026-01-07T17:32:31.661784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038b1/r3tmp/yandexUTzLZF.tmp 2026-01-07T17:32:31.661866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:32:31.769380Z INFO: TTestServer started on Port 18516 GrpcPort 20700 PQClient connected to localhost:20700 2026-01-07T17:32:32.038534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:32:35.211187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592673895960417267:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:35.211277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:35.350318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592673896349023646:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:32:35.350380Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:32:35.459234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673917823860448:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.459235Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673917823860457:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.459316Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.459628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592673917823860464:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.459702Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:32:35.464471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:32:35.488132Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592673917823860463:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:32:35.699371Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592673917823860495:2143] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:32:35.961421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:32:35.966545Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592673917435255110:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:32:35.967281Z node 2 :KQP_COMPILE_ACTOR ... _written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 29 queued_in_partition_duration_ms: 50 } 2026-01-07T17:38:54.030847Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 1 2026-01-07T17:38:54.030895Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 2 2026-01-07T17:38:54.030935Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 3 2026-01-07T17:38:54.030957Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 4 2026-01-07T17:38:54.030982Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 5 2026-01-07T17:38:54.031015Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 6 2026-01-07T17:38:54.031038Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 7 2026-01-07T17:38:54.031068Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 8 2026-01-07T17:38:54.031103Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 9 2026-01-07T17:38:54.031127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: acknoledged message 10 2026-01-07T17:38:54.031407Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: close. Timeout = 0 ms 2026-01-07T17:38:54.031462Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session will now close 2026-01-07T17:38:54.031517Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: aborting 2026-01-07T17:38:54.033149Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:38:54.032948Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' size 1208 2026-01-07T17:38:54.033045Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' 2026-01-07T17:38:54.033339Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0 grpc read done: success: 0 data: 2026-01-07T17:38:54.033365Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0 grpc read failed 2026-01-07T17:38:54.033401Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0 grpc closed 2026-01-07T17:38:54.033430Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0 is DEAD 2026-01-07T17:38:54.034402Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:38:54.035717Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [17:7592675542846796616:2580] destroyed 2026-01-07T17:38:54.035772Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:38:54.035810Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.035834Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.035858Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.035884Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.035906Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.036596Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session is aborting and will not restart 2026-01-07T17:38:54.036748Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|efa70f99-989b56eb-db22424a-7c392a5_0] Write session: destroy 2026-01-07T17:38:54.044107Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.044147Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.044168Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.044200Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.044222Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.144353Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.144410Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.144429Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.144455Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.144483Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.245157Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.245207Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.245240Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.245277Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.245302Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.345531Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.345578Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.345601Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.345636Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.345658Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.445940Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.447642Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.447692Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.447730Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.447747Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.546267Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.546306Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.546322Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.546348Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.546364Z node 17 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.646618Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.646667Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.646688Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.646717Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.646737Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:38:54.746989Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:38:54.747038Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.747056Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:38:54.747085Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:38:54.747104Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-07T17:38:58.359560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-07T17:38:58.366591Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2026-01-07T17:38:58.366702Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-07T17:38:58.368653Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-07T17:38:58.368717Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2026-01-07T17:38:58.368761Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2026-01-07T17:38:58.368889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-07T17:38:58.368967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-07T17:38:58.368991Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2026-01-07T17:38:58.369024Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2026-01-07T17:38:58.369061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-07T17:38:58.369079Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2026-01-07T17:38:58.369096Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2026-01-07T17:38:58.369154Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2026-01-07T17:38:58.369239Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-07T17:38:58.369395Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-07T17:38:58.369448Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-07T17:38:58.369514Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-07T17:38:58.369592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2026-01-07T17:38:58.369654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-07T17:38:58.369732Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-07T17:38:58.369793Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-07T17:38:58.369841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-07T17:38:58.369888Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-07T17:38:58.369946Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 2026-01-07T17:38:58.369977Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-07T17:38:58.370044Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-07T17:38:58.370094Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-07T17:38:58.370153Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-07T17:38:58.370206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-07T17:38:58.370255Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-07T17:38:58.370304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2026-01-07T17:38:58.370342Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-07T17:38:58.370386Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2026-01-07T17:38:58.370434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:58.370503Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2026-01-07T17:38:58.370521Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:58.370567Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-07T17:38:58.370617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-07T17:38:58.370708Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-07T17:38:58.370739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2026-01-07T17:38:58.370758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2026-01-07T17:38:58.370896Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 12345 2026-01-07T17:38:58.381348Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2026-01-07T17:38:58.381448Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream >> TPopulatorQuorumTest::TwoRingGroups >> TPopulatorQuorumTest::TwoRingGroups [GOOD] >> TPopulatorTestWithResets::UpdateAck >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system >> TPopulatorTest::Boot >> BasicUsage::WriteSessionCloseWaitsForWrites >> TPopulatorTest::MakeDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-07T17:38:59.421575Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-07T17:38:59.426121Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-07T17:38:59.426210Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-07T17:38:59.428019Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-07T17:38:59.428062Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-07T17:38:59.428097Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-07T17:38:59.428146Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-07T17:38:59.428159Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-07T17:38:59.428173Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-07T17:38:59.428196Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-07T17:38:59.428220Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-07T17:38:59.428240Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-07T17:38:59.428271Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-07T17:38:59.428288Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-07T17:38:59.428311Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-01-07T17:38:59.428336Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-07T17:38:59.428352Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-07T17:38:59.428364Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-07T17:38:59.428382Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-07T17:38:59.428402Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-07T17:38:59.428418Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-07T17:38:59.428469Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:38:59.428527Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.428644Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-07T17:38:59.428683Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.428750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:38:59.428802Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-07T17:38:59.428869Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.428910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-07T17:38:59.428979Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.429022Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:38:59.429071Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-07T17:38:59.429108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.429166Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-07T17:38:59.429190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.429226Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-07T17:38:59.429249Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-07T17:38:59.429295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.429343Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-07T17:38:59.429374Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.429423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-07T17:38:59.429450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-07T17:38:59.429480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.429525Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-07T17:38:59.429547Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.429611Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-07T17:38:59.429650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-07T17:38:59.429691Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-07T17:38:59.429788Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:38:59.429825Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:38:59.429864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:38:59.429895Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.429940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-07T17:38:59.429969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-07T17:38:59.429999Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-07T17:38:59.430022Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:59.430080Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:38:59.430123Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.430170Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-07T17:38:59.430190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-07T17:38:59.430214Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:59.430252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:38:59.430281Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.430319Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-07T17:38:59.430336Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 2026-01-07T17:38:59.430350Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:59.430384Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-07T17:38:59.430411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.430456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-07T17:38:59.430473Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-07T17:38:59.430501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:59.430534Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-07T17:38:59.430559Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.430584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2026-01-07T17:38:59.430603Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-07T17:38:59.430615Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:38:59.430643Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-07T17:38:59.430664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:38:59.430716Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-07T17:38:59.430733Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-01-07T17:38:59.430747Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-07T17:38:59.430895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-07T17:38:59.430930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-07T17:38:59.430968Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2026-01-07T17:38:59.441317Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2026-01-07T17:38:59.441379Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> TPopulatorTestWithResets::UpdateAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] Test command err: 2026-01-07T17:38:54.326831Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:38:54.326888Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:38:54.326942Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:38:54.326968Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:38:54.327006Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:38:54.327075Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:38:54.328189Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:38:54.332645Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
{ PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-07T17:38:58.536044Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-07T17:38:58.536126Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-07T17:38:58.536162Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:38:58.546648Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:38:58.546703Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:38:58.546794Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:38:58.546855Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-07T17:38:58.546883Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-07T17:38:58.546908Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-07T17:38:58.546945Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:38:58.546995Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:38:58.547036Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-07T17:38:58.547066Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-07T17:38:58.547423Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.547975Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548170Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548318Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548444Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: 
"/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548557Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548666Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548767Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-07T17:38:58.548814Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:38:58.549264Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:56, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:38:58.549328Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:58, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:38:58.549356Z node 9 :CMS NOTICE: 
sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:38:58.549387Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:59, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:38:58.549423Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-01-07T17:38:58.549701Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-07T17:38:58.549743Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:56 2026-01-07T17:38:58.549774Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 2026-01-07T17:38:58.549796Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:58 2026-01-07T17:38:58.549816Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:59 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TPopulatorTest::Boot [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> BasicUsage::WriteSessionWriteInHandlers >> TPopulatorTest::MakeDir [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2026-01-07T17:39:00.245940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:00.246021Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ulator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.636481Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.636550Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.636580Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.636655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.636714Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.636800Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.636895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637039Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.637097Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637191Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.637257Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637325Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.637376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637488Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.637553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.637741Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.637867Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.637926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: 
/Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.638116Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.638273Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.638405Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.638459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638509Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638590Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.638656Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-07T17:39:00.638791Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.638859Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-07T17:39:00.638917Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-07T17:39:00.638966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-07T17:39:00.638993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-07T17:39:00.639066Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-07T17:39:00.639117Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:97:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2026-01-07T17:39:00.639156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2026-01-07T17:39:00.639188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-07T17:39:00.639232Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] TestWaitNotification: OK eventTxId 100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2026-01-07T17:39:00.354663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:00.354714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2026-01-07T17:39:00.576382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:00.576446Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ck to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2026-01-07T17:39:00.935453Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:12:2059], cookie# 100 2026-01-07T17:39:00.935491Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:15:2062], cookie# 100 2026-01-07T17:39:00.935520Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:18:2065], cookie# 100 2026-01-07T17:39:00.935597Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2127], cookie# 100 2026-01-07T17:39:00.935937Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:97:2125], cookie# 100 2026-01-07T17:39:00.936243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:98:2126], cookie# 100 2026-01-07T17:39:00.936308Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 2 2026-01-07T17:39:00.936346Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:99:2127], cookie# 100 2026-01-07T17:39:00.936363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 
at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000037 FAKE_COORDINATOR: Erasing txId 100 2026-01-07T17:39:00.938329Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 555, preserialized size# 119 2026-01-07T17:39:00.938391Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 7 2026-01-07T17:39:00.938521Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.938582Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.938619Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.938997Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 314, preserialized size# 2 2026-01-07T17:39:00.939047Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 100, is deletion# false, version: 3 2026-01-07T17:39:00.939148Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:12:2059], cookie# 100 2026-01-07T17:39:00.939201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:15:2062], cookie# 100 2026-01-07T17:39:00.939244Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:18:2065], cookie# 100 2026-01-07T17:39:00.939350Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:97:2125], cookie# 100 2026-01-07T17:39:00.939389Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.939437Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.939481Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-07T17:39:00.939616Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:98:2126], cookie# 100 2026-01-07T17:39:00.939651Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 
72057594046678944, LocalPathId: 1], version# 7 2026-01-07T17:39:00.939691Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 100 2026-01-07T17:39:00.939727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 100 2026-01-07T17:39:00.939760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 100 2026-01-07T17:39:00.940034Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:99:2127], cookie# 100 2026-01-07T17:39:00.940228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 100 2026-01-07T17:39:00.940324Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 100 2026-01-07T17:39:00.940355Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-07T17:39:00.940485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 100 2026-01-07T17:39:00.940502Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-07T17:39:00.961433Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... 
waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-07T17:39:00.968581Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-07T17:39:00.968701Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-01-07T17:39:00.971137Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-07T17:39:00.971221Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-07T17:39:00.971273Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-07T17:39:00.971389Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:39:00.971449Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-07T17:39:00.971471Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-07T17:39:00.971491Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-07T17:39:00.971525Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-07T17:39:00.971545Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-07T17:39:00.971562Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-07T17:39:00.971637Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-07T17:39:00.971742Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:39:00.971902Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:39:00.971955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:39:00.972009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:39:00.972097Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-07T17:39:00.972145Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:39:00.972229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-07T17:39:00.972343Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:39:00.972403Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-07T17:39:00.972472Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-07T17:39:00.972555Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-07T17:39:00.972599Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-07T17:39:00.972676Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-07T17:39:00.972722Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-07T17:39:00.972771Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:39:00.972842Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-07T17:39:00.972895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:39:00.972960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { 
Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2026-01-07T17:39:00.973025Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-07T17:39:00.973075Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-07T17:39:00.973115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:39:00.973193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-07T17:39:00.973213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-07T17:39:00.973265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-07T17:39:00.973304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-07T17:39:00.973396Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-07T17:39:00.973432Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-07T17:39:00.973475Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2026-01-07T17:39:00.973658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 12345 2026-01-07T17:39:00.984072Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-07T17:39:00.984148Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.4%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskFaultyState [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> BasicUsage::GetAllStartPartitionSessions >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> BasicUsage::PropagateSessionClosed >> BasicUsage::RetryDiscoveryWithCancel >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:36:49.760835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:49.871095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:49.891850Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:49.892375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:49.892446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:50.172246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:50.172335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:50.249648Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807406654760 != 1767807406654764 2026-01-07T17:36:50.259637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:50.306220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:50.435916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:50.767840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:50.781602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:50.903038Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:36:50.903113Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:36:50.903233Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:36:51.056580Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:36:51.056686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:36:51.057352Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:36:51.057485Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:36:51.057950Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:36:51.058119Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:36:51.058264Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:36:51.058548Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:36:51.060433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:51.061808Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:36:51.061892Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:36:51.122618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:51.123700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:51.124029Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:36:51.124336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:51.182167Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:51.182953Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:51.183079Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:51.184909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:51.185066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:51.185133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:51.185520Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:51.185675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:51.185812Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:36:51.186385Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:51.287269Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:51.287505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:51.287635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:36:51.287673Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:51.287707Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:51.287741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:51.287976Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:51.288029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:51.288377Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:51.288482Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:51.288581Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:51.288642Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:51.288692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:51.288728Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:51.288759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:51.288789Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:51.288838Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:51.289074Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:51.289113Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:51.289159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:36:51.289640Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:36:51.289720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:51.289911Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:51.290207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:51.290269Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:51.290362Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ode 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:01.335848Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:01.335906Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:39:01.335932Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:01.335964Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-07T17:39:01.336083Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:01.336153Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:01.336237Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2026-01-07T17:39:01.336432Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-07T17:39:01.336546Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:01.336941Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [16:1105:2874], Table: `/Root/table` ([72057594046644480:38:1]), SessionActorId: [16:1044:2874]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:1105:2874].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-07T17:39:01.337170Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [16:1099:2874], SessionActorId: [16:1044:2874], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:1044:2874]. 2026-01-07T17:39:01.337662Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=16&id=MWE3ZTc2YzEtNTFkYTJjMDAtOGZiOWU3ZGYtZmE4YTYwMTc=, ActorId: [16:1044:2874], ActorState: ExecuteState, LegacyTraceId: 01kecrmqr6eb58gtgratbj1bv5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:1100:2874] from: [16:1099:2874] trace_id# 2026-01-07T17:39:01.337878Z node 16 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [16:1100:2874] TxId: 281474976715663. Ctx: { TraceId: 01kecrmqr6eb58gtgratbj1bv5, Database: , SessionId: ydb://session/3?node_id=16&id=MWE3ZTc2YzEtNTFkYTJjMDAtOGZiOWU3ZGYtZmE4YTYwMTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-07T17:39:01.338365Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=16&id=MWE3ZTc2YzEtNTFkYTJjMDAtOGZiOWU3ZGYtZmE4YTYwMTc=, ActorId: [16:1044:2874], ActorState: ExecuteState, LegacyTraceId: 01kecrmqr6eb58gtgratbj1bv5, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:39:01.339399Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [16:1099:2874], Recipient [16:894:2772]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-07T17:39:01.339441Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-07T17:39:01.339559Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [16:894:2772], Recipient [16:894:2772]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:01.339586Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:01.339637Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-07T17:39:01.339729Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-07T17:39:01.339839Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:39:01.339931Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-07T17:39:01.339966Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:01.339992Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-07T17:39:01.340019Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:01.340041Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:01.340096Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v501/0 ImmediateWriteEdgeReplied# v501/0 2026-01-07T17:39:01.340163Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-07T17:39:01.340201Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 
2026-01-07T17:39:01.340224Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:01.340245Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:39:01.340265Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:39:01.340284Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:01.340303Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:39:01.340328Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T17:39:01.340349Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:39:01.340372Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-07T17:39:01.340475Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-07T17:39:01.340548Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-07T17:39:01.340645Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:39:01.340719Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:01.340761Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:39:01.340860Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:39:01.340915Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:01.340966Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-07T17:39:01.340990Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:39:01.341016Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:01.341042Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:01.341088Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:01.341113Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:01.341135Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-07T17:39:01.341182Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: 
TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:01.341208Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:01.341241Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:39:01.341301Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:01.343528Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [16:70:2117], Recipient [16:894:2772]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2026-01-07T17:39:01.346698Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [16:1113:2924], Recipient [16:894:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:01.346787Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:01.346884Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [16:1112:2923], serverId# [16:1113:2924], sessionId# [0:0:0] 2026-01-07T17:39:01.347007Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [16:828:2723], Recipient [16:894:2772]: NKikimr::TEvDataShard::TEvGetOpenTxs |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> BasicUsage::BasicWriteSession >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-07T17:36:47.180191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:36:47.213104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:36:47.213333Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:36:47.221589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:36:47.221859Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:36:47.222105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:36:47.222240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:36:47.222343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:36:47.222445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:36:47.222566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:36:47.222693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:36:47.222819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:36:47.222932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:36:47.223032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:36:47.223187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:36:47.223321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:36:47.257826Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:36:47.258537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:36:47.258604Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:36:47.258792Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:47.258981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:36:47.259052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:36:47.259105Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:36:47.259214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:36:47.259277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:36:47.259339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:36:47.259370Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:36:47.259550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:36:47.259618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:36:47.259659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:36:47.259709Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:36:47.259811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:36:47.259867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:36:47.259933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:36:47.259975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:36:47.260025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:36:47.260127Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:36:47.260175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:36:47.260232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:36:47.260279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:36:47.260316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:36:47.260625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:36:47.260680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:36:47.260707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:36:47.260892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:36:47.260953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:36:47.260980Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:36:47.261030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:36:47.261071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:36:47.261109Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:36:47.261151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:36:47.261185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 
2026-01-07T17:36:47.261214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:36:47.261352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:36:47.261407Z node 1 :TX_COLUMNSHARD WAR ... 4 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:38:58.153552Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:38:58.154139Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:38:58.154391Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.154441Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:38:58.154600Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-07T17:38:58.154678Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-07T17:38:58.154990Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-07T17:38:58.155161Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.155336Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.155502Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.155834Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:38:58.155993Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.156141Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.156402Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2026-01-07T17:38:58.157065Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":132068117,"name":"_full_task","f":132068117,"d_finished":0,"c":0,"l":132081361,"d":13244},"events":[{"name":"bootstrap","f":132068489,"d_finished":1527,"c":1,"l":132070016,"d":1527},{"a":132080709,"name":"ack","f":132079009,"d_finished":1425,"c":1,"l":132080434,"d":2077},{"a":132080693,"name":"processing","f":132070192,"d_finished":3690,"c":3,"l":132080438,"d":4358},{"name":"ProduceResults","f":132069519,"d_finished":2481,"c":6,"l":132081061,"d":2481},{"a":132081066,"name":"Finish","f":132081066,"d_finished":0,"c":0,"l":132081361,"d":295},{"name":"task_result","f":132070211,"d_finished":2199,"c":2,"l":132078475,"d":2199}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.157149Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:38:58.157698Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":132068117,"name":"_full_task","f":132068117,"d_finished":0,"c":0,"l":132082090,"d":13973},"events":[{"name":"bootstrap","f":132068489,"d_finished":1527,"c":1,"l":132070016,"d":1527},{"a":132080709,"name":"ack","f":132079009,"d_finished":1425,"c":1,"l":132080434,"d":2806},{"a":132080693,"name":"processing","f":132070192,"d_finished":3690,"c":3,"l":132080438,"d":5087},{"name":"ProduceResults","f":132069519,"d_finished":2481,"c":6,"l":132081061,"d":2481},{"a":132081066,"name":"Finish","f":132081066,"d_finished":0,"c":0,"l":132082090,"d":1024},{"name":"task_result","f":132070211,"d_finished":2199,"c":2,"l":132078475,"d":2199}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:38:58.157781Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:38:58.140164Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-07T17:38:58.157827Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:38:58.157994Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] >> Cdc::Alter [GOOD] >> Cdc::DescribeStream >> DataShardOutOfOrder::UncommittedReadSetAck >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:36:55.916862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:56.183958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:56.211770Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:56.212321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:56.212391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:56.558083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:56.558228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:56.672464Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807412265052 != 1767807412265056 2026-01-07T17:36:56.700018Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:56.758297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:56.871662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:57.320602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:57.337588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:57.467097Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:36:57.467183Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:36:57.467327Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:36:57.646541Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:36:57.646657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:36:57.647374Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:36:57.647516Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:36:57.647875Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:36:57.648022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:36:57.648146Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:36:57.648394Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:36:57.650167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:57.651393Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:36:57.651483Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:36:57.699687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:57.700714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:57.701058Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:36:57.701323Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:57.752475Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:57.753396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:57.753534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:57.755336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:57.755441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:57.755505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:57.755894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:57.756057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:57.756151Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:36:57.756793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:57.801273Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:57.801486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:57.801594Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:36:57.801644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:57.801677Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:57.801723Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:57.801966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:57.802032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:57.802358Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:57.802470Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:57.802574Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:57.802625Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:57.802665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:57.802710Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:57.802742Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:57.802774Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:57.802817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:57.802916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:57.802948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:57.802990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:36:57.803394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:36:57.803466Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:57.803583Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:57.803816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:57.803884Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:57.803956Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... node 13 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2026-01-07T17:39:01.837982Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1265:3068]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:39:02.532228Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:39:02.532358Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715671 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-07T17:39:02.537220Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1840:3545], Recipient [13:994:2849]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-07T17:39:02.537467Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:39:02.537562Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:39:02.537641Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-07T17:39:02.537728Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-07T17:39:02.537905Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:39:02.537988Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:39:02.538064Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:02.538137Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:39:02.538205Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-07T17:39:02.538283Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:39:02.538321Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:02.538353Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:39:02.538383Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:39:02.538563Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:39:02.539020Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[13:1840:3545], 0} after executionsCount# 1 2026-01-07T17:39:02.539155Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[13:1840:3545], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-07T17:39:02.539312Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[13:1840:3545], 0} finished in read 2026-01-07T17:39:02.539443Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:39:02.539479Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:39:02.539511Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:39:02.539543Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:39:02.539598Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:39:02.539625Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:39:02.539667Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-07T17:39:02.539746Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:39:02.539931Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:39:02.541948Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1840:3545], Recipient [13:994:2849]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:02.542040Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2026-01-07T17:39:03.106364Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:39:03.106487Z node 13 
:TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715672 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-07T17:39:03.111271Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1870:3571], Recipient [13:1265:3068]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-07T17:39:03.111507Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-07T17:39:03.111602Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:39:03.111682Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-07T17:39:03.111806Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2026-01-07T17:39:03.112030Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-07T17:39:03.112123Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2026-01-07T17:39:03.112196Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:03.112260Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-07T17:39:03.112323Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2026-01-07T17:39:03.112384Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-07T17:39:03.112418Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:03.112444Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2026-01-07T17:39:03.112489Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2026-01-07T17:39:03.112652Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:39:03.113107Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[13:1870:3571], 0} after executionsCount# 1 2026-01-07T17:39:03.113232Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[13:1870:3571], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-07T17:39:03.113371Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[13:1870:3571], 0} finished in read 2026-01-07T17:39:03.113490Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-07T17:39:03.113524Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2026-01-07T17:39:03.113559Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2026-01-07T17:39:03.113589Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2026-01-07T17:39:03.113645Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-07T17:39:03.113670Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2026-01-07T17:39:03.113710Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037891 has finished 2026-01-07T17:39:03.113779Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-07T17:39:03.113950Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2026-01-07T17:39:03.115747Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1870:3571], Recipient [13:1265:3068]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:03.115848Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser >> DataShardTxOrder::RandomDotRanges_DelayRS >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_45_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:36:56.485564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:56.599857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:56.618637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:56.619207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:56.619269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:56.919749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:56.919880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:57.015788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807413274343 != 1767807413274347 2026-01-07T17:36:57.053823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:57.106567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:57.223508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:57.549589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:57.565296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:57.673124Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:36:57.673194Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:36:57.673301Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:36:57.867549Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:36:57.867670Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:36:57.868494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:36:57.868640Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:36:57.876040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:36:57.876321Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:36:57.876544Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:36:57.877016Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:36:57.879235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:57.880751Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:36:57.880839Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:36:57.935648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:57.936865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:57.937292Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:36:57.937591Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:57.995708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:58.004914Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:58.005196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:58.007178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:58.007277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:58.007372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:58.007801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:58.007988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:58.008117Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:36:58.025613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:58.104249Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:58.104545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:58.104679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:36:58.104728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:58.104773Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:58.104813Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:58.105133Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:58.105205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:58.105644Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:58.105790Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:58.105893Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:58.105955Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:58.106019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:58.106065Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:58.106108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:58.106167Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:58.106236Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:58.106393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:58.106448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:58.106509Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:36:58.107077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:36:58.107140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:58.107277Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:58.107565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:58.107653Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:58.107764Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... efault, DatabaseId: /Root, IsStreamingQuery: 0}. ExecuteDatashardTransaction trace_id_verbosity# 0 trace_id# 2026-01-07T17:39:04.491533Z node 13 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 1 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:39:04.491579Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:146} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Datashard not finished yet actor_state# WaitResolveState shard_id# 72075186224037888 state# Executing trace_id# 2026-01-07T17:39:04.491625Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), trace_id# 2026-01-07T17:39:04.491666Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:39:04.491892Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [13:1185:2877], Recipient [13:1154:2961]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1185 RawX2: 55834577725 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010&8\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2026-01-07T17:39:04.491945Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:04.492060Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [13:1154:2961], Recipient [13:1154:2961]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:04.492092Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:04.492148Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:04.492292Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:39:04.492361Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2026-01-07T17:39:04.492397Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-07T17:39:04.492447Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2026-01-07T17:39:04.492476Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:04.492505Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:04.492541Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2026-01-07T17:39:04.492583Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715665] at 72075186224037888 2026-01-07T17:39:04.492612Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-07T17:39:04.492636Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:04.492658Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:39:04.492684Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BlockFailPoint 
2026-01-07T17:39:04.492707Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-07T17:39:04.492730Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:39:04.492752Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-07T17:39:04.492775Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-07T17:39:04.492850Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-07T17:39:04.493005Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-07T17:39:04.493106Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-07T17:39:04.493183Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-07T17:39:04.493210Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-07T17:39:04.493234Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:04.493259Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:04.493313Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:39:04.493409Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2026-01-07T17:39:04.493439Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:04.493466Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:04.493492Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:04.493533Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-07T17:39:04.493558Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:04.493581Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2026-01-07T17:39:04.493633Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:04.493662Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:04.493698Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:04.493849Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1413} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037888 status# COMPLETE error# trace_id# 2026-01-07T17:39:04.493974Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:04.494078Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [13:1185:2877] TxId: 281474976715665. Ctx: { TraceId: 01kecrmv207bzgnz48n7x8hdf9, Database: , SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:39:04.494269Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, ActorId: [13:1043:2877], ActorState: CleanupState, LegacyTraceId: 01kecrmv207bzgnz48n7x8hdf9, EndCleanup is_final# false trace_id# 2026-01-07T17:39:04.494454Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=13&id=OWY5NmI0Y2YtZDE2OTEzZGEtOWQ1MDVmNzctYjc2MGJkMWQ=, ActorId: [13:1043:2877], ActorState: CleanupState, LegacyTraceId: 01kecrmv207bzgnz48n7x8hdf9, Sent query response back to proxy proxy_request_id# 8 proxy_id# [13:66:2113] trace_id# 2026-01-07T17:39:04.748478Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [13:1194:2987], Recipient [13:1154:2961]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:04.748587Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:04.748671Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [13:1193:2986], serverId# [13:1194:2987], sessionId# [0:0:0] 2026-01-07T17:39:04.748915Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [13:828:2723], Recipient [13:1154:2961]: NKikimr::TEvDataShard::TEvGetOpenTxs |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] >> TxUsage::WriteToTopic_Demo_46_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:36:53.570555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:36:53.710383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:36:53.735603Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:36:53.736145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:36:53.736210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:36:54.176872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:36:54.181304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:36:54.359104Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807410543861 != 1767807410543865 2026-01-07T17:36:54.384359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:36:54.454392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:36:54.547759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:36:54.949827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:36:54.969575Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:36:55.087254Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:36:55.087330Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:36:55.087492Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:36:55.275006Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:36:55.275125Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:36:55.276000Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:36:55.276119Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:36:55.276493Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:36:55.276678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:36:55.276847Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:36:55.277182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:36:55.279329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:36:55.280823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:36:55.280895Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:36:55.314541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:36:55.315689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:36:55.316013Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:36:55.316312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:36:55.366254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:36:55.367035Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:36:55.367157Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:36:55.369044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:36:55.369156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:36:55.369259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:36:55.369749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:36:55.369911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:36:55.370021Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:36:55.370514Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:36:55.412509Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:36:55.412742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:36:55.412874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:36:55.412916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:36:55.412978Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:36:55.413015Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:36:55.413302Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:55.413369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:36:55.413746Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:36:55.413859Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:36:55.413975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:36:55.414039Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:36:55.414099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:36:55.414149Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:36:55.414191Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:36:55.414230Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:36:55.414286Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:36:55.414470Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:55.414534Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:36:55.414589Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:36:55.415005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:36:55.415065Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:36:55.415210Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:36:55.415501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:36:55.415594Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:36:55.415700Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [14:1198:2878] TxId: 281474976715665. Ctx: { TraceId: 01kecrmtzy590kck9fx3nqwe7p, Database: , SessionId: ydb://session/3?node_id=14&id=NTgxNDliYmEtYjBkZjExYWItZDlmMDI2Ni1lY2E2OTVkNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-07T17:39:04.425411Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=NTgxNDliYmEtYjBkZjExYWItZDlmMDI2Ni1lY2E2OTVkNQ==, ActorId: [14:1047:2878], ActorState: ExecuteState, LegacyTraceId: 01kecrmtzy590kck9fx3nqwe7p, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/table`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-07T17:39:04.426348Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [14:1068:2878], Recipient [14:895:2773]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } Op: Rollback } 2026-01-07T17:39:04.426392Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-07T17:39:04.426470Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-07T17:39:04.426510Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... waiting for blocked lock status ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 ... 
waiting for blocked lock status (done) 2026-01-07T17:39:04.428445Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [14:829:2724], Recipient [14:895:2773]: NActors::TEvents::TEvPoison 2026-01-07T17:39:04.429184Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-01-07T17:39:04.429274Z node 14 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:39:04.449464Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [14:1203:3008], Recipient [14:1205:3009]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:04.456101Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [14:1203:3008], Recipient [14:1205:3009]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:04.456273Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [14:1203:3008], Recipient [14:1205:3009]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:04.460293Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1205:3009] 2026-01-07T17:39:04.460698Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:04.467238Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:04.468763Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:04.471814Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:04.471927Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:04.472033Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:04.472711Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:04.473004Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:04.473096Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:04.473194Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2026-01-07T17:39:04.473365Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:39:04.473440Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:04.473624Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [14:1219:3016] 2026-01-07T17:39:04.473698Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:04.473769Z node 14 :TX_DATASHARD INFO: datashard.cpp:1292: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2026-01-07T17:39:04.473831Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-01-07T17:39:04.474231Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [14:1205:3009], Recipient [14:1205:3009]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:04.474289Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:04.474609Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435075, Sender [14:1205:3009], Recipient [14:1205:3009]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2026-01-07T17:39:04.474654Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3181: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2026-01-07T17:39:04.475177Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1205:3009]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 700} 2026-01-07T17:39:04.475225Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-07T17:39:04.475286Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 700 2026-01-07T17:39:04.475347Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:04.476335Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [14:70:2117], Recipient [14:1205:3009]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2026-01-07T17:39:04.476443Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:04.476509Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2026-01-07T17:39:04.476578Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:04.476848Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-07T17:39:04.476919Z node 14 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-07T17:39:04.477020Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-07T17:39:04.477869Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [14:1205:3009], Recipient [14:1106:2922]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-07T17:39:04.477925Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:04.477998Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-07T17:39:04.478132Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 
TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-07T17:39:04.478214Z node 14 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 600:281474976715663 at 72075186224037889 2026-01-07T17:39:04.478290Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:39:04.478382Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-07T17:39:04.478922Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [14:1106:2922], Recipient [14:1205:3009]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-07T17:39:04.478975Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:04.479044Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-07T17:39:04.479137Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:39:04.479326Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1205:3009]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 500 NextReadStep# 700 ReadStep# 700 } 2026-01-07T17:39:04.479373Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-07T17:39:04.479442Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 500 next step 700 2026-01-07T17:39:04.671053Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous >> DataShardScan::ScanFollowedByUpdate >> Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> DataShardOutOfOrder::UncommittedReads >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> DataShardOutOfOrder::TestReadTableWriteConflict >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> DataShardScan::ScanFollowedByUpdate [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2026-01-07T17:39:08.690269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:08.690339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:08.690897Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:08.704037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:08.704470Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor 
[1:135:2157] 2026-01-07T17:39:08.704805Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:08.716696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:08.752087Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:08.752719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:08.754404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:08.754466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:08.754551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:08.754903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:08.755089Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:08.755136Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:08.824293Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:08.859667Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:08.859893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:08.859998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:08.860060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:08.860105Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:08.860143Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:08.860315Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:08.860387Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:08.860726Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:08.860862Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:08.861027Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:08.861076Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:08.861121Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:08.861158Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:08.861210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit 
PlanQueue has no ready operations at 9437184 2026-01-07T17:39:08.861246Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:08.861294Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:08.861386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:08.861440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:08.861501Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:08.868752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:08.868855Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:08.868988Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:08.869187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:08.869260Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:08.869333Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:08.869388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:08.869442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:08.869487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:08.869525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:08.869851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:08.869892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:08.869937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:08.869986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:08.870060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:08.870090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 
2026-01-07T17:39:08.870125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:08.870157Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:08.870190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:08.882628Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:08.882706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:08.882744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:08.882801Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:08.882868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:08.883405Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:08.883467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:08.883521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:08.883669Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:08.883705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:08.883850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:08.883894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:08.883932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:08.883967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:08.887750Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:08.887830Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:08.888081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:08.888125Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:08.888189Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:08.888230Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:08.888273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:08.888333Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:08.888368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... on::Execute at 9437185 2026-01-07T17:39:12.090073Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:12.090111Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2026-01-07T17:39:12.090154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2026-01-07T17:39:12.090187Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2026-01-07T17:39:12.090233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-07T17:39:12.090259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2026-01-07T17:39:12.090284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2026-01-07T17:39:12.090312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2026-01-07T17:39:12.090499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is DelayComplete 2026-01-07T17:39:12.090543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2026-01-07T17:39:12.090581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2026-01-07T17:39:12.090611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2026-01-07T17:39:12.090646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-07T17:39:12.090677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2026-01-07T17:39:12.090703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437185 has finished 2026-01-07T17:39:12.090733Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:12.090759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-07T17:39:12.090791Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-07T17:39:12.090821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:39:12.090982Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.091027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.091082Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-07T17:39:12.091111Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:12.091137Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2026-01-07T17:39:12.091171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2026-01-07T17:39:12.091213Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2026-01-07T17:39:12.091246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-07T17:39:12.091269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2026-01-07T17:39:12.091289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2026-01-07T17:39:12.091310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2026-01-07T17:39:12.091439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is DelayComplete 2026-01-07T17:39:12.091454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2026-01-07T17:39:12.091485Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2026-01-07T17:39:12.091503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2026-01-07T17:39:12.091521Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-07T17:39:12.091534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2026-01-07T17:39:12.091548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437186 has finished 2026-01-07T17:39:12.091565Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:12.091588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:39:12.091607Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-07T17:39:12.091622Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-07T17:39:12.091727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:240:2232], Recipient [1:240:2232]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.091749Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.091783Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:12.091810Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:12.091828Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2026-01-07T17:39:12.091841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2026-01-07T17:39:12.091857Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2026-01-07T17:39:12.091881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-07T17:39:12.091895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2026-01-07T17:39:12.091919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2026-01-07T17:39:12.091936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2026-01-07T17:39:12.092036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is DelayComplete 2026-01-07T17:39:12.092051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2026-01-07T17:39:12.092073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2026-01-07T17:39:12.092097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2026-01-07T17:39:12.092127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-07T17:39:12.092140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2026-01-07T17:39:12.092152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437184 has finished 2026-01-07T17:39:12.092166Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:12.092178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:12.092195Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:12.092213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:12.109467Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:12.109553Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:12.109610Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2026-01-07T17:39:12.109687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2026-01-07T17:39:12.109741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:39:12.110343Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:12.110381Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:12.110404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2026-01-07T17:39:12.110440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2026-01-07T17:39:12.110469Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:12.110615Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:12.110639Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:12.110679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2026-01-07T17:39:12.110714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2026-01-07T17:39:12.110739Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:38:32.820600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:32.944506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:32.952669Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:38:32.953750Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:32.954121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:32.954334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:32.956397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:32.956797Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:38:32.956992Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:38:32.957234Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:38:33.646764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:33.796690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:33.796822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:33.797657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:33.797752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:33.851893Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:38:33.852704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:33.853072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:33.956750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:34.107699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:38:35.268551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:35.401908Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:35.402094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:35.846929Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:225:2183] Handle TEvProposeTransaction 2026-01-07T17:38:35.847024Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:225:2183] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:38:35.847593Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:225:2183] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1570:3024] 2026-01-07T17:38:36.280084Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1570:3024] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:38:36.280224Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1570:3024] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:38:36.289602Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1570:3024] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:38:36.289771Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1570:3024] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:38:36.290437Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1570:3024] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:36.290687Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1570:3024] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:38:36.290887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1570:3024] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:38:36.302867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:38:36.303758Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1570:3024] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:38:36.322814Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1570:3024] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:38:36.322924Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1570:3024] txid# 281474976710657 SEND to# [1:1498:2982] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:38:36.519753Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1647:3082] 2026-01-07T17:38:36.520231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:38:36.587941Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1650:3083] 2026-01-07T17:38:36.588246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:38:36.634255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:38:36.634637Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:38:36.636988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:38:36.637120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:38:36.637210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:38:36.637696Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:38:36.638201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:38:36.638325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:1744:3082] in generation 1 2026-01-07T17:38:36.742876Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1654:3086] 2026-01-07T17:38:36.743153Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:38:36.764635Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:38:36.764899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:38:36.782503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-07T17:38:36.782620Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-07T17:38:36.782678Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-07T17:38:36.783318Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:38:36.783844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:38:36.783928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [1:1763:3083] in generation 1 2026-01-07T17:38:36.821559Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:38:36.822130Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:38:36.824198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2026-01-07T17:38:36.824289Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037892 2026-01-07T17:38:36.824342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037892 2026-01-07T17:38:36.824813Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:38:36.826347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:38:36.826459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037892 persisting started state actor id [1:1780:3086] in generation 1 2026-01-07T17:38:36.879204Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1746:2411] 2026-01-07T17:38:36.879585Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInit ... tx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:39:11.368429Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2007: SelfId: [5:2182:3385], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll inputs 2026-01-07T17:39:11.368460Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2022: SelfId: [5:2182:3385], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll sources 2026-01-07T17:39:11.368597Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:37: SelfId: [5:2182:3385], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2026-01-07T17:39:11.368631Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:405: SelfId: [5:2182:3385], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2026-01-07T17:39:11.368660Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [5:2182:3385], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kecrmz6bfzndkxt15abx05r6. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. 
All channels and sinks finished 2026-01-07T17:39:11.368783Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710665;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:39:11.371692Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710665, taskId: 4. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:39:11.372866Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.509488s 2026-01-07T17:39:11.373166Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2180:3383] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 228538 Tasks { TaskId: 1 CpuTimeUs: 226991 Tables { TablePath: "/Root/table-1" ReadRows: 54 ReadBytes: 432 } ComputeCpuTimeUs: 40 BuildCpuTimeUs: 226951 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 5 StartTimeMs: 1767807551363 CreateTimeMs: 1767807549691 UpdateTimeMs: 1767807551363 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:11.373214Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2181:3384], CA [5:2182:3385], CA [5:2180:3383], trace_id# 2026-01-07T17:39:11.373511Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2180:3383] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 229269 Tasks { TaskId: 1 CpuTimeUs: 227030 FinishTimeMs: 1767807551363 Tables { TablePath: "/Root/table-1" ReadRows: 54 ReadBytes: 432 } ComputeCpuTimeUs: 79 BuildCpuTimeUs: 226951 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 5 StartTimeMs: 1767807551363 CreateTimeMs: 1767807549691 UpdateTimeMs: 1767807551364 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:11.373544Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2180:3383] 2026-01-07T17:39:11.373576Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [5:2181:3384], CA [5:2182:3385], trace_id# 2026-01-07T17:39:11.373657Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:39:11.373687Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:11.373708Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:39:11.373745Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:39:11.373905Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2181:3384] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 113780 DurationUs: 14000 Tasks { TaskId: 3 StageId: 1 CpuTimeUs: 110687 FinishTimeMs: 1767807551366 InputRows: 2 InputBytes: 14 ComputeCpuTimeUs: 182 BuildCpuTimeUs: 110505 WaitOutputTimeUs: 3401 HostName: "ghrun-z2c6jslqle" NodeId: 5 StartTimeMs: 1767807551352 CreateTimeMs: 1767807549918 UpdateTimeMs: 1767807551366 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:11.373933Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2181:3384] 2026-01-07T17:39:11.373961Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2182:3385], trace_id# 2026-01-07T17:39:11.374481Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [5:1934:3244] SeqNo# 1 Rows# 1 trace_id# 2026-01-07T17:39:11.374802Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2182:3385] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 359600 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 358147 FinishTimeMs: 1767807551368 InputRows: 1 InputBytes: 7 ComputeCpuTimeUs: 129 BuildCpuTimeUs: 358018 HostName: "ghrun-z2c6jslqle" NodeId: 5 StartTimeMs: 1767807551368 CreateTimeMs: 1767807550029 UpdateTimeMs: 1767807551368 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:11.374836Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. 
Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2182:3385] 2026-01-07T17:39:11.374889Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:11.374943Z node 5 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-07T17:39:11.374989Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [5:2172:3244] TxId: 281474976710665. Ctx: { TraceId: 01kecrmz6bfzndkxt15abx05r6, Database: , SessionId: ydb://session/3?node_id=5&id=MWY3YjY4MTctYThhZDg3ZDItNGFhZjUxY2EtM2U1MDBlZjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.007641s ReadRows: 100 ReadBytes: 800 RequestUnits# 100 ForceFlag# true trace_id# ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 4 VirtualTimestamp { Step: 3500 TxId: 281474976710663 } Finished: true 2026-01-07T17:39:11.376169Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 4000 2026-01-07T17:39:11.376229Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037890: waitStep# 4000 readStep# 4000 observedStep# 4000 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 173 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2026-01-07T17:39:01.110042Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1767807541110009 2026-01-07T17:39:01.329839Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675576419196253:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:01.330822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:01.376282Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:01.380871Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675577451275450:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:01.382888Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:01.392735Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:01.568297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:01.578517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:01.601628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:01.601733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:01.603526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:01.603600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:01.619182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:01.630866Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:01.634301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:39:01.689990Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:01.813640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001b74/r3tmp/yandexMYhvWz.tmp 2026-01-07T17:39:01.813703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001b74/r3tmp/yandexMYhvWz.tmp 2026-01-07T17:39:01.813879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001b74/r3tmp/yandexMYhvWz.tmp 2026-01-07T17:39:01.813981Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:01.843799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:01.857035Z INFO: TTestServer started on Port 11320 GrpcPort 29761 2026-01-07T17:39:01.859998Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:29761 2026-01-07T17:39:02.041239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:02.338055Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:02.390591Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:04.314697Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590336177581:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.314698Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590336177573:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.314784Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.315109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590336177588:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.315154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.321017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:04.337000Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675590336177587:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:39:04.658049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:04.672591Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675590336177619:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:04.682230Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675589304099493:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:04.682699Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzkxOTYyMDEtZTYyYjZhYy1hNWVhMTdlZC01MWI4ZjE5Zg==, ActorId: [1:7592675589304099444:2328], ActorState: ExecuteState, LegacyTraceId: 01kecrmv0xb0pkfbh0m27hmb60, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:04.693904Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:04.698382Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675590336177628:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:04.698745Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTMxYmFhYWQtZGQzYWQ2YTUtZDE3NmM1ZjAtNjY0MWE0MTY=, ActorId: [2:7592675590336177571:2300], ActorState: ExecuteState, LegacyTraceId: 01kecrmtzqd59ywav946jwrsm4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:04.699026Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:0 ... E DEBUG: partition_write.cpp:636: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:39:11.719614Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:11.719633Z node 2 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:39:11.719657Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:39:11.719675Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:11.719695Z node 2 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:39:11.719777Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2026-01-07T17:39:11.720586Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2026-01-07T17:39:11.720630Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:39:11.720641Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:39:11.720656Z 
node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:11.721068Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000? size 169 WTime 1767807551720 2026-01-07T17:39:11.721238Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:39:11.721329Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2026-01-07T17:39:11.723903Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7592675620400949120:2388] 2026-01-07T17:39:11.723956Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:39:11.723973Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' size 169 2026-01-07T17:39:11.724004Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:39:11.724032Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-07T17:39:11.724059Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2026-01-07T17:39:11.724194Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:11.724212Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.724223Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:11.724241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.724256Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:11.724284Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:39:11.724315Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2026-01-07T17:39:11.724520Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-07T17:39:11.725231Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2026-01-07T17:39:11.725385Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics 
{ persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:39:11.725419Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2026-01-07T17:39:11.725439Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2026-01-07T17:39:11.725644Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2026-01-07T17:39:11.725777Z :INFO: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2026-01-07T17:39:11.725805Z :INFO: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session will now close 2026-01-07T17:39:11.725851Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: aborting 2026-01-07T17:39:11.726565Z :WARNING: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2026-01-07T17:39:11.726691Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0 grpc read done: success: 0 data: 2026-01-07T17:39:11.726712Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0 grpc read failed 2026-01-07T17:39:11.727045Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 2 sessionId: src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0 2026-01-07T17:39:11.727080Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0 is DEAD 2026-01-07T17:39:11.727161Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:39:11.727203Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session is aborting and will not restart 2026-01-07T17:39:11.727277Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3ec635e0-62fe664f-3e13b0cb-c2226a5e_0] MessageGroupId [src_id] Write session: destroy 2026-01-07T17:39:11.727423Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:39:11.727694Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7592675619368871597:2464] destroyed 2026-01-07T17:39:11.727732Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:39:11.727760Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:11.727774Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.727784Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:11.727800Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.727811Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:11.762964Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:11.762993Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.763006Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:11.763034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.763044Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:11.863292Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:11.863333Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.863347Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:11.863367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:11.863380Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:12.343779Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [1:7592675623663838935:2468] TxId: 281474976715676. Ctx: { TraceId: 01kecrn2ks8qskj36684467mw3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzRmYjJkODktODM1MjIyZWMtOGQyNmIwNzgtZGMyNWU1YTY=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 trace_id# 2026-01-07T17:39:12.344366Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592675623663838942:2468], TxId: 281474976715676, task: 3. Ctx: { CheckpointId : . TraceId : 01kecrn2ks8qskj36684467mw3. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NzRmYjJkODktODM1MjIyZWMtOGQyNmIwNzgtZGMyNWU1YTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7592675623663838935:2468], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_27_Query >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-07T17:37:02.428307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:37:02.506370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:37:02.506616Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:37:02.527567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:37:02.527866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:37:02.528335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:37:02.528495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:37:02.528607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:37:02.528741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:37:02.528875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:37:02.532091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:37:02.532307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:37:02.532417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 
2026-01-07T17:37:02.532535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:37:02.532666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:37:02.532785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:37:02.604271Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:37:02.605031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:37:02.605102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:37:02.605305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:37:02.605494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:37:02.605584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:37:02.605654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:37:02.605800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:37:02.605913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:37:02.605970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:37:02.606006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:37:02.606211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:37:02.606298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:37:02.606372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:37:02.606408Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:37:02.606513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:37:02.606593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:37:02.606669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:37:02.606705Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:37:02.606829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:37:02.606876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:37:02.606925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:37:02.606994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:37:02.607043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:37:02.607076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:37:02.607332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:37:02.607413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:37:02.607459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:37:02.607653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:37:02.607720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:37:02.607759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:37:02.607834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:37:02.607890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:37:02.607953Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:37:02.608010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:37:02.608055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:37:02.608090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:37:02.608254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:37:02.608325Z node 1 :TX_COLUMNSHARD WAR ... 
4 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:39:07.433213Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:39:07.433786Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:39:07.434006Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.434052Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:39:07.434230Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-07T17:39:07.434312Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-07T17:39:07.434598Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-07T17:39:07.434780Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.434950Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.435099Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.435429Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:39:07.435618Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.435768Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.436038Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2026-01-07T17:39:07.436632Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":125990407,"name":"_full_task","f":125990407,"d_finished":0,"c":0,"l":126003634,"d":13227},"events":[{"name":"bootstrap","f":125990892,"d_finished":1399,"c":1,"l":125992291,"d":1399},{"a":126002932,"name":"ack","f":126001290,"d_finished":1367,"c":1,"l":126002657,"d":2069},{"a":126002918,"name":"processing","f":125992521,"d_finished":3608,"c":3,"l":126002661,"d":4324},{"name":"ProduceResults","f":125991868,"d_finished":2441,"c":6,"l":126003321,"d":2441},{"a":126003328,"name":"Finish","f":126003328,"d_finished":0,"c":0,"l":126003634,"d":306},{"name":"task_result","f":125992540,"d_finished":2174,"c":2,"l":126000766,"d":2174}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.436740Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:39:07.437341Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":125990407,"name":"_full_task","f":125990407,"d_finished":0,"c":0,"l":126004333,"d":13926},"events":[{"name":"bootstrap","f":125990892,"d_finished":1399,"c":1,"l":125992291,"d":1399},{"a":126002932,"name":"ack","f":126001290,"d_finished":1367,"c":1,"l":126002657,"d":2768},{"a":126002918,"name":"processing","f":125992521,"d_finished":3608,"c":3,"l":126002661,"d":5023},{"name":"ProduceResults","f":125991868,"d_finished":2441,"c":6,"l":126003321,"d":2441},{"a":126003328,"name":"Finish","f":126003328,"d_finished":0,"c":0,"l":126004333,"d":1005},{"name":"task_result","f":125992540,"d_finished":2174,"c":2,"l":126000766,"d":2174}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:39:07.437436Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:39:07.420008Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-07T17:39:07.437481Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:39:07.437646Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin >> TxOrderInternals::OperationOrder [GOOD] >> BasicUsage::BasicWriteSession 
[GOOD] >> BasicUsage::CloseWriteSessionImmediately >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous >> Cdc::DecimalKey [GOOD] >> Cdc::AddColumn |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> DataShardOutOfOrder::UncommittedReads [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:14.238848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:14.358182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:14.380387Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:14.380917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:14.381000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:14.661602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:14.661755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:14.750910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807551425995 != 1767807551425999 2026-01-07T17:39:14.767133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:14.814697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:14.912464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:15.182109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:15.194891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:15.298884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:15.330298Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:15.331102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:15.331329Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:15.331522Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:15.365056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:15.365739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:15.365849Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:15.367528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:15.367617Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:15.367662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:15.367953Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:15.368062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:15.368140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:15.368550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:15.391877Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:15.392079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:15.392183Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:15.392219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:15.392247Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:15.392281Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:15.392546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.392613Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.393021Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:15.393110Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:15.393174Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:15.393208Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:15.393255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:15.393285Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:15.393344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:15.393368Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:15.393400Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:15.393503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.393530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.393568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:15.393928Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:15.393970Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:15.394074Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:15.394283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:15.394332Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:15.394395Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:15.394453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:15.394517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:15.394546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:15.394580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:15.394828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:15.394857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:15.394883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:15.394907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:15.394949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:15.394972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:15.395022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:15.395045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:15.395065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:15.395690Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:15.395742Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:15.395781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:15.395808Z node 1 :TX_DATASHARD T ... 17:39:16.698641Z node 1 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2026-01-07T17:39:16.698691Z node 1 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:38:1] 2026-01-07T17:39:16.698793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-07T17:39:16.698847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:16.698882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-07T17:39:16.698946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:16.698992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:16.699040Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-07T17:39:16.699108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-07T17:39:16.699143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:16.699169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:16.699190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:39:16.699210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:39:16.699233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:16.699252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:39:16.699270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T17:39:16.699291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:39:16.699322Z 
node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-07T17:39:16.699397Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-07T17:39:16.699523Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037888, row count=1 2026-01-07T17:39:16.699576Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:39:16.699637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:16.699665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:39:16.699714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:39:16.699745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:16.699798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-07T17:39:16.699828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:39:16.699863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:16.699893Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:16.699949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:39:16.699977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:16.700006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished ... 
blocked commit for tablet 72075186224037888 2026-01-07T17:39:16.844226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1168:2957], Recipient [1:885:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:39:16.844439Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:16.844502Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-07T17:39:16.844543Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2026-01-07T17:39:16.844625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:16.844744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:16.844787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:16.844823Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:16.844877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:16.844949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:39:16.844987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:16.845017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:16.845041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:16.845064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:16.845168Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:39:16.845418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayComplete 2026-01-07T17:39:16.845459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:16.845499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:16.845534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit CompletedOperations 2026-01-07T17:39:16.845588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:16.845621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:16.845653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:39:16.845704Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:16.919707Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 4000 2026-01-07T17:39:16.919829Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2026-01-07T17:39:17.048427Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:17.048504Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:17.048550Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2026-01-07T17:39:17.048656Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:17.048782Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:17.048821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:17.048872Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1168:2957], 0} after executionsCount# 1 2026-01-07T17:39:17.048974Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1168:2957], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:17.049126Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1168:2957], 0} finished in read 2026-01-07T17:39:17.051991Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1168:2957], Recipient [1:885:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:17.052090Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:39:10.704900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:10.802457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:10.807887Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:10.808564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:10.808841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:10.808989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:10.810389Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:10.810719Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:10.810842Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:10.810935Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:11.189653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:11.297749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:11.297896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:11.298620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:11.298717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:11.343296Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:11.343866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:11.344117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:11.421827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:11.546611Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:12.091036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:12.155906Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:12.155991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:12.475991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:12.539125Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:12.543444Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:12.543793Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1623:2399] 2026-01-07T17:39:12.544055Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:12.591344Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:12.598406Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:12.598567Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:12.600191Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:12.600284Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:12.600345Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:12.600720Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:12.600904Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:12.600985Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1647:2399] in generation 1 2026-01-07T17:39:12.603846Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:12.639594Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:12.639776Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:12.639871Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1651:2416] 2026-01-07T17:39:12.639904Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:12.639936Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:12.639965Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:12.640232Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1623:2399], Recipient [2:1623:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.640284Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:12.640544Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:12.640652Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:12.640853Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:12.640889Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:12.640953Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:12.641001Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:12.641031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:12.641061Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:12.641101Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:12.641206Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1620:2397], Recipient [2:1623:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:12.641240Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:12.641287Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1606:3056], serverId# [2:1620:2397], sessionId# [0:0:0] 2026-01-07T17:39:12.641724Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:827:2464], Recipient [2:1620:2397] 2026-01-07T17:39:12.641766Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:12.641894Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:12.642153Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:12.642219Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:12.642319Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T17:39:12.642373Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:12.642412Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:12.642451Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:12.642496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:12.642811Z node 2 :TX_DATASHARD TRACE: datashard_pipelin ... -01-07T17:39:16.335700Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:16.335750Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976710670] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:39:16.335794Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976710670 2026-01-07T17:39:16.335833Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976710670 2026-01-07T17:39:16.335861Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976710670] at 72075186224037888 on unit CompleteWrite 2026-01-07T17:39:16.335906Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:16.335973Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:39:16.336000Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:39:16.336022Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:39:16.336253Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:65:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3502 UnfrozenTablets: 72075186224037888 2026-01-07T17:39:16.336883Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2360:2509], Recipient [2:2488:2539]: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-07T17:39:16.336969Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:16.337010Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976710670 2026-01-07T17:39:16.337074Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-07T17:39:16.337986Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2360:2509], Recipient [1:2472:3598] 2026-01-07T17:39:16.338033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:16.338105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710670 2026-01-07T17:39:16.338166Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-07T17:39:16.338229Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:39:16.338590Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3502} 2026-01-07T17:39:16.338757Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:39:16.338798Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976710670] at 72075186224037890 on unit ExecuteWrite 2026-01-07T17:39:16.338835Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976710670 2026-01-07T17:39:16.338877Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976710670 2026-01-07T17:39:16.338907Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976710670] at 72075186224037890 on unit CompleteWrite 2026-01-07T17:39:16.338964Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976710670] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2581:3624] 2026-01-07T17:39:16.339027Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:39:16.339108Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-07T17:39:16.339521Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2488:2539], Recipient [2:2360:2509]: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:39:16.339565Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:16.339595Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976710670 2026-01-07T17:39:16.339649Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:39:16.339792Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976710670] from 72075186224037888 at tablet 
72075186224037888 send result to client [1:2579:3624] 2026-01-07T17:39:16.340233Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:16.341869Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:65:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3502 UnfrozenTablets: 72075186224037890 2026-01-07T17:39:16.342160Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2488:2539], Recipient [1:2472:3598] 2026-01-07T17:39:16.342207Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:16.342254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976710670 2026-01-07T17:39:16.342317Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-07T17:39:16.342440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976710670] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2580:3624] 2026-01-07T17:39:16.342979Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:65:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3502} 2026-01-07T17:39:16.343309Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:39:16.352585Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:39:16.352991Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-07T17:39:16.353310Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2488:2539], Recipient [2:2360:2509]: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2026-01-07T17:39:16.353369Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.353427Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2026-01-07T17:39:16.353543Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:39:16.354076Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2420:3557], Recipient [2:2360:2509] 2026-01-07T17:39:16.354121Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.354166Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2026-01-07T17:39:16.354219Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, 
Sender [1:2420:3557], Recipient [2:2488:2539] 2026-01-07T17:39:16.354245Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.354275Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2026-01-07T17:39:16.354365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2488:2539], Recipient [1:2420:3557] 2026-01-07T17:39:16.354398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.354430Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2026-01-07T17:39:16.355066Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:39:16.355157Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2360:2509], Recipient [2:2488:2539]: {TEvReadSet step# 3502 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-07T17:39:16.355205Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.355245Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 2026-01-07T17:39:16.355560Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2360:2509], Recipient [1:2420:3557] 2026-01-07T17:39:16.355600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.355636Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:07.687125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:07.796752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:07.814737Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:07.815210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:07.815271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:08.089411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:08.089538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:08.159438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807544986068 != 1767807544986072 2026-01-07T17:39:08.173597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:08.218912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:08.311165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:08.584511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:08.598065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:08.699757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:08.955606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:09.174589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:09.174725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:09.174801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:09.175898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:09.176100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:09.180688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:09.327081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:09.395550Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... performing the first select { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets ... captured readset ... captured readset ... performing an upsert ... performing the second select ... performing the third select ... performing the last upsert and commit 2026-01-07T17:39:11.522279Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NDMwMzhjNjctZjhjYjM4NDYtN2I4YzJhODMtNGEwZDllOTM=, ActorId: [1:1165:2960], ActorState: ExecuteState, LegacyTraceId: 01kecrn1zc42ks3hjsqwpen224, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-07T17:39:14.559983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:14.564749Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:14.567217Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:14.567419Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:14.567508Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:14.789089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:14.789218Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:14.811255Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807551962585 != 1767807551962589 2026-01-07T17:39:14.814213Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:14.856952Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:14.936212Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:15.200316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:15.214074Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:15.315493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:15.337851Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:15.338897Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:15.339191Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-07T17:39:15.339410Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:15.387083Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:15.387938Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:15.388089Z node 2 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:15.389956Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:15.390057Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:15.390128Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:15.390519Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:15.390689Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:15.390790Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting start ... # 4001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-07T17:39:16.883676Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:16.883701Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 ... performing the first select 2026-01-07T17:39:17.648185Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1271:3042], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-01-07T17:39:17.648411Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1272:3043], Recipient [2:969:2824]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-01-07T17:39:17.648684Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:17.648792Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:17.648884Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:39:17.648943Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:17.648990Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:17.649038Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:17.649093Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-07T17:39:17.649138Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 
72075186224037888 is Executed 2026-01-07T17:39:17.649165Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:17.649190Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:17.649212Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:17.649338Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:17.649618Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:39:17.649687Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-07T17:39:17.649740Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1271:3042], 0} after executionsCount# 1 2026-01-07T17:39:17.649790Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1271:3042], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:17.649874Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1271:3042], 0} finished in read 2026-01-07T17:39:17.649958Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:39:17.649988Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:17.650017Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:17.650048Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:17.650094Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:39:17.650118Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:17.650145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-07T17:39:17.650190Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:17.650288Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:17.650372Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 
2026-01-07T17:39:17.650420Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-07T17:39:17.650468Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:39:17.650493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:39:17.650515Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:17.650538Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:39:17.650573Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-07T17:39:17.650602Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:39:17.650625Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:17.650646Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:39:17.650667Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:39:17.650765Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:17.650948Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:39:17.650989Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-07T17:39:17.651023Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1272:3043], 0} after executionsCount# 1 2026-01-07T17:39:17.651057Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1272:3043], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:17.651106Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1272:3043], 0} finished in read 2026-01-07T17:39:17.651151Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:39:17.651178Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:39:17.651202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:39:17.651227Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:39:17.651263Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:39:17.651286Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:39:17.651309Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-07T17:39:17.651336Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:39:17.651397Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:39:17.651629Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-07T17:39:17.651890Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:969:2824]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-07T17:39:17.653936Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1271:3042], Recipient [2:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:17.654005Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:39:17.654182Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1272:3043], Recipient [2:969:2824]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:17.654216Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:10.320496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:10.408563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:10.426759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:10.427414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:10.427511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:10.713822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:10.713924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:10.776988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807547509784 != 1767807547509788 2026-01-07T17:39:10.792825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:10.843626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:10.945881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:11.221257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:11.234493Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:11.336979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:11.371074Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:11.372036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:11.372309Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:11.372535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:11.409793Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:11.410521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:11.410633Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:11.412172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:11.412246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:11.412321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:11.412588Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:11.412699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:11.412791Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:11.424512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:11.459569Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:11.459794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:11.459931Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:11.459978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:11.460026Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:11.460068Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:11.460319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:11.460384Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:11.460711Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:11.460844Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:11.461124Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:11.461164Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:11.461222Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:11.461265Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:11.461318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:11.461351Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:11.461397Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:11.461517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:11.461563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:11.461611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:11.462019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:11.462081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:11.462222Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:11.462411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:11.462464Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:11.462530Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:11.462567Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:11.462594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:11.462621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:11.462661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:11.462873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:11.462897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:11.462930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:11.462953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:11.462998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:11.463024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:11.463047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:11.463069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:11.463085Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:11.464187Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:11.464220Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:11.474968Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... t 72075186224037892 has no attached operations 2026-01-07T17:39:18.189746Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037892 2026-01-07T17:39:18.189771Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-07T17:39:18.189809Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T17:39:18.190028Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1364:3110], Recipient [2:1221:3007]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:39:18.190063Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:39:18.190096Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1362:3108], serverId# [2:1364:3110], sessionId# [0:0:0] 2026-01-07T17:39:18.190420Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1221:3007]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-07T17:39:18.190454Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-07T17:39:18.190485Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 3000 2026-01-07T17:39:18.190524Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-07T17:39:18.190569Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:39:18.201719Z node 2 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037893 ack snapshot OpId 281474976715665 2026-01-07T17:39:18.201865Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037893 2026-01-07T17:39:18.201993Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:18.202074Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 2026-01-07T17:39:18.202129Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037893, 
actorId: [2:1372:3118] 2026-01-07T17:39:18.202159Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037893 2026-01-07T17:39:18.202199Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037893 2026-01-07T17:39:18.202229Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-07T17:39:18.202383Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037888 ack split to schemeshard 281474976715664 2026-01-07T17:39:18.203205Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553157, Sender [2:1228:3011], Recipient [2:970:2824]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2026-01-07T17:39:18.203279Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2026-01-07T17:39:18.203704Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1228:3011], Recipient [2:1228:3011]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:18.203744Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:18.203938Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [2:1363:3109], Recipient [2:970:2824]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1363:3109] ServerId: [2:1365:3111] } 2026-01-07T17:39:18.203975Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T17:39:18.204318Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1228:3011]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 3000} 2026-01-07T17:39:18.204352Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-07T17:39:18.204380Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 3000 2026-01-07T17:39:18.204409Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-07T17:39:18.204732Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-07T17:39:18.204772Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:18.204803Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2026-01-07T17:39:18.204832Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2026-01-07T17:39:18.204861Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2026-01-07T17:39:18.204887Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2026-01-07T17:39:18.204948Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 
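The trace above (and the read that precedes it) walks an operation through the datashard execution-unit pipeline: CheckRead, BuildAndWaitDependencies, ExecuteRead (where the optimistic lock is acquired), then CompletedOperations, with each unit reporting an execution status before the plan advances. As a rough mental model only, here is a minimal toy sketch of such a unit pipeline. All names and structures below are hypothetical and are not the actual YDB datashard classes; the sketch merely mimics the "Trying to execute / Execution status / Advance execution plan" pattern visible in the log.

// Toy execution-unit pipeline (illustrative sketch, not YDB code).
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, Restart };

struct TToyOperation {
    int TxId = 0;
    bool LockAcquired = false;
};

struct TToyUnit {
    std::string Name;
    std::function<EStatus(TToyOperation&)> Execute;
};

int main() {
    // The unit order mirrors the sequence seen in the trace above.
    std::vector<TToyUnit> plan = {
        {"CheckRead", [](TToyOperation&) { return EStatus::Executed; }},
        {"BuildAndWaitDependencies", [](TToyOperation&) { return EStatus::Executed; }},
        {"ExecuteRead", [](TToyOperation& op) {
             op.LockAcquired = true;  // stands in for "Acquired lock# ..." in the log
             return EStatus::Executed;
         }},
        {"CompletedOperations", [](TToyOperation&) { return EStatus::Executed; }},
    };

    TToyOperation op{5};
    for (const auto& unit : plan) {
        std::cout << "Trying to execute [0:" << op.TxId << "] on unit " << unit.Name << "\n";
        if (unit.Execute(op) == EStatus::Executed) {
            std::cout << "Execution status is Executed; advancing plan past " << unit.Name << "\n";
        }
    }
    std::cout << "Execution plan for [0:" << op.TxId << "] has finished"
              << (op.LockAcquired ? " (optimistic lock taken)" : "") << "\n";
    return 0;
}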
2026-01-07T17:39:18.205835Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1365:3111], Recipient [2:1228:3011]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:39:18.205881Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:39:18.205916Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1363:3109], serverId# [2:1365:3111], sessionId# [0:0:0] 2026-01-07T17:39:18.207035Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1228:3011]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-07T17:39:18.207074Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-07T17:39:18.207108Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 3000 2026-01-07T17:39:18.207150Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037893: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-07T17:39:18.207200Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:39:18.207829Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:400:2399], Recipient [2:887:2766] 2026-01-07T17:39:18.207893Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2026-01-07T17:39:18.210942Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2026-01-07T17:39:18.211025Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:39:18.211651Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:39:18.228173Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715665 2026-01-07T17:39:18.230703Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:400:2399], Recipient [2:972:2825] 2026-01-07T17:39:18.230769Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-01-07T17:39:18.232737Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2026-01-07T17:39:18.232802Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation 
from: 2026-01-07T17:39:18.233554Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:962:2819], Recipient [2:970:2824]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:39:18.659827Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1170:2864], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1170 RawX2: 8589937456 } TxBody: " \0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\222\004\000\000\000\000\000\000\0210\013\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? L\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2026-01-07T17:39:18.659913Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:18.660030Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3109: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-07T17:39:18.660469Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2026-01-07T17:39:18.660989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> TxUsage::WriteToTopic_Demo_46_Query >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser >> 
GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2026-01-07T17:39:21.170006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:21.170072Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:21.170605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:21.187708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:21.188026Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:21.188274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:21.196234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:21.230618Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:21.231288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:21.233074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:21.233159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:21.233229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:21.233575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:21.233766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:21.233830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:21.304647Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:21.329952Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:21.330122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:21.330201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:21.330246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:21.330285Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:21.330318Z node 1 
:TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.330452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.330498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.330703Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:21.330786Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:21.330904Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.330940Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:21.330972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:21.330997Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:21.331032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:21.331059Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:21.331096Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:21.331164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.331206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.331273Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:21.337862Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:21.337925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:21.338014Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:21.338154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:21.338232Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:21.338286Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:21.338341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution 
status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:21.338372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:21.338403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:21.338437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.338700Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:21.338743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:21.338794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:21.338829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.338965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:21.339001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:21.339032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:21.339066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.339092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:21.351421Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:21.351498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.351532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.351588Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:21.351662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:21.352189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.352254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.352321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:21.352465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:21.352498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:21.352670Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.352734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:21.352769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:21.352799Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:21.356494Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:21.356556Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.356775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.356818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.356871Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.356911Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:21.356984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:21.357022Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:21.357060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
39:22.346313Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.346459Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:39:22.346494Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:39:22.346517Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:39:22.346542Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.346568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.346610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.346654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-07T17:39:22.346679Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.346904Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.346936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.346993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.347055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-07T17:39:22.347082Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.347280Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.347309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.347346Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.347397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-07T17:39:22.347425Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.347597Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.347631Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:14] at 9437184 on unit FinishPropose 2026-01-07T17:39:22.347678Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose 
latency: 1 ms, status: COMPLETE 2026-01-07T17:39:22.347764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.347909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.347943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.347971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.348014Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-07T17:39:22.348054Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.348246Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.348266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.348292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.348332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-07T17:39:22.348374Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.348556Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.348592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.348636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:22.348658Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:22.348747Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:39:22.348766Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:22.348790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2026-01-07T17:39:22.348811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:22.348836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-07T17:39:22.348851Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
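The completions above each emit a delayed "Send delayed Ack RS Ack ... Seqno# N" and are later matched by "Receive RS Ack ... txId M" events between tablets 9437185 and 9437184. The following is a small illustrative sketch, under the assumption that the essential bookkeeping is a per-(source, dest, txId) map of outstanding readsets keyed off a monotonically increasing seqno; the names and the simplified flow are hypothetical and do not reproduce YDB's actual readset protocol.

// Toy readset-acknowledgement bookkeeping (illustrative sketch, not YDB code).
#include <cstdint>
#include <iostream>
#include <map>
#include <tuple>
#include <vector>

// A key identifying one outgoing readset: source tablet, destination tablet, txId.
struct TReadSetKey {
    uint64_t Source = 0;
    uint64_t Dest = 0;
    uint64_t TxId = 0;
    bool operator<(const TReadSetKey& o) const {
        return std::tie(Source, Dest, TxId) < std::tie(o.Source, o.Dest, o.TxId);
    }
};

int main() {
    std::map<TReadSetKey, uint64_t> outgoing;  // readsets still waiting for an ack (value = seqno)
    uint64_t seqno = 0;

    auto send = [&](uint64_t src, uint64_t dst, uint64_t txId) {
        outgoing[{src, dst, txId}] = ++seqno;
        std::cout << "Send delayed Ack RS Ack {txid# " << txId << " Seqno# " << seqno << "}\n";
    };
    auto receiveAck = [&](uint64_t src, uint64_t dst, uint64_t txId) {
        if (outgoing.erase({src, dst, txId})) {
            std::cout << "Receive RS Ack for txId " << txId << "\n";
        }
    };

    // Same tx order as the completions in the log above: 4, 6, 7, 8, 9, 10, 12, 5.
    const std::vector<uint64_t> txOrder = {4, 6, 7, 8, 9, 10, 12, 5};
    for (uint64_t txId : txOrder) send(9437185, 9437184, txId);
    for (uint64_t txId : txOrder) receiveAck(9437185, 9437184, txId);

    std::cout << "Outstanding readsets: " << outgoing.size() << "\n";  // expect 0 once all acks arrive
    return 0;
}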
2026-01-07T17:39:22.349043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-07T17:39:22.349087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349118Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2026-01-07T17:39:22.349220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-07T17:39:22.349250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349277Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2026-01-07T17:39:22.349327Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-07T17:39:22.349344Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349359Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2026-01-07T17:39:22.349398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-07T17:39:22.349420Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349434Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2026-01-07T17:39:22.349478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-07T17:39:22.349507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349521Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2026-01-07T17:39:22.349582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-07T17:39:22.349605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349642Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2026-01-07T17:39:22.349691Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-07T17:39:22.349714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349732Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2026-01-07T17:39:22.349765Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:350:2317]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-07T17:39:22.349779Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:22.349794Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2026-01-07T17:39:15.712216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:15.712261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:15.712669Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:15.723220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:15.723554Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:15.723797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:15.733829Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:15.775294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:15.775865Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:15.777409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:15.777532Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:15.777599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:15.777936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:15.778138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:15.778193Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 
9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:15.849281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:15.880922Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:15.881112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:15.881205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:15.881263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:15.881301Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:15.881333Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:15.881472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.881531Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.881789Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:15.881883Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:15.882006Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:15.882044Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:15.882086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:15.882121Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:15.882165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:15.882198Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:15.882232Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:15.882307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.882342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.882398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:15.888615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:15.888673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:15.888790Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:15.888953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:15.889022Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:15.889075Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:15.889126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:15.889158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:15.889206Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:15.889235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:15.889492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:15.889535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:15.889573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:15.889601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:15.889662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:15.889693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:15.889723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:15.889751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:15.889772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:15.901797Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:15.901858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:15.901890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:15.901943Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:15.901999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state 
WaitScheme 2026-01-07T17:39:15.902444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.902503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:15.902551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:15.902672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:15.902701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:15.902820Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:15.902856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:15.902890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:15.902922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:15.906545Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:15.906606Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:15.906834Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.906870Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.906919Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:15.906957Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:15.906987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:15.907027Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:15.907064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:21.820080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.820116Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:39:21.820202Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.820261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.820328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.820380Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:39:21.820417Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.820544Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:21.820578Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.820603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.820634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.820689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:39:21.820714Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.820817Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:21.820851Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:21.820897Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:21.820961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.820988Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.821029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.821070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:39:21.821098Z node 1 :TX_DATASHARD 
DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.821212Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:21.821234Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.821258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.821293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.821332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:39:21.821363Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.821507Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.821532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.821567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.821609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:39:21.821633Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.821726Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.821748Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.821777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.821817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:39:21.821842Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.821937Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:21.821984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:21.822022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:21.822098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 
Flags# 0 Seqno# 50} 2026-01-07T17:39:21.822122Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:21.822362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:39:21.822399Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.822432Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-07T17:39:21.822513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:21.822542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.822566Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:39:21.822642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:39:21.822667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.822749Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-07T17:39:21.822852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:39:21.822880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.822903Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-07T17:39:21.822983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:39:21.823023Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.823047Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-07T17:39:21.823131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:39:21.823162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2026-01-07T17:39:21.823185Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-07T17:39:21.823256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:39:21.823280Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.823313Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-07T17:39:21.823386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:21.823429Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:21.823459Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2026-01-07T17:38:30.237012Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675442624033726:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:30.242015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:30.294790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:30.538505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:30.538621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:30.542958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:30.585995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:30.640800Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675442624033703:2081] 1767807510225280 != 1767807510225283 2026-01-07T17:38:30.649161Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:30.727008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:30.727041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:30.727048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:30.727143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:30.769003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:30.845336Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:38:30.849475Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:38:30.849506Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:38:30.850469Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:5448, port: 5448 2026-01-07T17:38:30.851267Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:38:30.884857Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:38:30.934169Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****NFHQ (ADEA698E) () has now valid token of ldapuser@ldap 2026-01-07T17:38:34.112038Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675460676484203:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:34.113420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:34.156836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:34.247285Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:34.247371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:34.265308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:34.285116Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:34.561065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:34.586890Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:34.590063Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:34.590083Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:34.590212Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:34.780450Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:38:34.784918Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:38:34.784977Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:38:34.785589Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:32252, port: 32252 2026-01-07T17:38:34.785678Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:38:34.810914Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:38:34.857426Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:38:34.858255Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:32252 return no entries 2026-01-07T17:38:34.858765Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****-Ezg (D87ABE92) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:32252 return no entries)' 2026-01-07T17:38:38.257657Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675477788321154:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:38.257692Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:38.467865Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:38.472003Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:38.472079Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:38.472854Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:38.478995Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592675477788321129:2081] 1767807518231301 != 1767807518231304 2026-01-07T17:38:38.503245Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:38.630361Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:38.630391Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:38.630399Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:38.630476Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:38.690976Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:38.817070Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:38:38.819943Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:38:38.819974Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:38:38.820623Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14461, port: 14461 2026-01-07T17:38:38.820681Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:38:38.833693Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:38:38.877490Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:38:38.923024Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:38:38.923578Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:38:38.923625Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:38:38.971247Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:38:39.021309Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:38:39.022406Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****k7tQ (770AACC2) () has now valid token of ldapuser@ldap 2026-01-07T17:38:39.290152Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2 ... earch: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:39:01.154089Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:39:01.155058Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ch_w (24FDD6D2) () has now valid token of ldapuser@ldap 2026-01-07T17:39:01.714748Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:05.683592Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592675570812271201:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:05.683694Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:05.716303Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ch_w (24FDD6D2) 2026-01-07T17:39:05.716409Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:23041, port: 23041 2026-01-07T17:39:05.716483Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:39:05.738775Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:39:05.781413Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:39:05.785373Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:23041 return no entries 2026-01-07T17:39:05.786043Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ch_w (24FDD6D2) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:23041 return no entries)' 2026-01-07T17:39:08.717336Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ch_w (24FDD6D2) 2026-01-07T17:39:11.830491Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592675617843873323:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:11.830546Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:11.843291Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:11.916844Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:11.966281Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:11.966394Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:11.969585Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:11.995116Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:12.013807Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:12.013839Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:12.013850Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:12.013950Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:12.106070Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:39:12.111452Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:39:12.111493Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:39:12.112328Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:28443, port: 28443 2026-01-07T17:39:12.112408Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:39:12.141125Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:39:12.189388Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, 
attributes: memberOf 2026-01-07T17:39:12.189869Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. Server is busy 2026-01-07T17:39:12.190294Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****w7xw (59A4B825) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. Server is busy)' 2026-01-07T17:39:12.190563Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:39:12.190580Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:39:12.191325Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:28443, port: 28443 2026-01-07T17:39:12.191371Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:39:12.201564Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:39:12.249384Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:39:12.249897Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. Server is busy 2026-01-07T17:39:12.250313Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****w7xw (59A4B825) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. Server is busy)' 2026-01-07T17:39:12.836403Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:14.833329Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****w7xw (59A4B825) 2026-01-07T17:39:14.833681Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:39:14.833703Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:39:14.834746Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:28443, port: 28443 2026-01-07T17:39:14.834799Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:39:14.851807Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:39:14.897404Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:39:14.897958Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. Server is busy 2026-01-07T17:39:14.898446Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****w7xw (59A4B825) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:28443. 
Server is busy)' 2026-01-07T17:39:16.831002Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7592675617843873323:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:16.831123Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:18.839993Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****w7xw (59A4B825) 2026-01-07T17:39:18.840286Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:39:18.840304Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:39:18.841123Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:28443, port: 28443 2026-01-07T17:39:18.841189Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:39:18.861645Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:39:18.905533Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:39:18.949363Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:39:18.949982Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:39:18.950039Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:39:18.995481Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:39:19.041369Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:39:19.042532Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****w7xw (59A4B825) () has now valid token of ldapuser@ldap |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 65305, MsgBus: 8157 2026-01-07T17:38:27.733751Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675429047545051:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:27.734142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:28.213009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:28.298059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:28.298189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:28.402979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:28.424539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:28.436537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675429047544828:2081] 1767807507673116 != 1767807507673119 2026-01-07T17:38:28.456061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:28.710564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:28.710599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:28.710609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:28.710711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:28.729716Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:29.301181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:38:29.301490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:38:29.303186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:29.304218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:38:29.306263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807509349, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:38:29.307246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-07T17:38:29.307277Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2026-01-07T17:38:29.307439Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592675433342512643:2240] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-07T17:38:29.307652Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675429047544796:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:29.307768Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675429047544799:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:29.307833Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675429047544802:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:29.308021Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675433342512541:2182][/Root] Path was updated to new version: owner# [1:7592675429047545115:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:29.308309Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592675433342512643:2240] Ack update: ack to# [1:7592675429047545166:2139], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:38:29.308449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2026-01-07T17:38:29.309159Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675433342512790:2288][/Root] Path was updated to new version: owner# [1:7592675433342512784:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:29.309428Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675433342512791:2289][/Root] Path was updated to new version: owner# [1:7592675433342512785:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:29.321423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:38:32.014448Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:32.015616Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002de0/r3tmp/spilling-tmp-runner/node_1_7dcef95b-71c93de8-21f12f9d-a66fe18, actor: [1:7592675450522382158:2307] 2026-01-07T17:38:32.015811Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002de0/r3tmp/spilling-tmp-runner 2026-01-07T17:38:32.019524Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrkrv2d4hzbfyxyx8ry7jt", Request has 18444976266197.533741s seconds to be completed 2026-01-07T17:38:32.022902Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrkrv2d4hzbfyxyx8ry7jt", Created new session, sessionId: ydb://session/3?node_id=1&id=YTFmMTQzYzItMWYzMTE3MzAtZjZiMGQ5OTMtMTM5Y2E2NDM=, workerId: [1:7592675450522382178:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:38:32.023179Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrkrv2d4hzbfyxyx8ry7jt 2026-01-07T17:38:32.023341Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:38:32.023364Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:38:32.023400Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:32.024898Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675450522382180:2499][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592675429047545115:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:32.033886Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675450522382181:2500][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592675429047545115:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:38:32.036077319 204565 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.036290730 204565 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0107 17:38:32.038621931 204565 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.038800940 204565 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:32.041245Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:38:32.041448Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675450522382197:2502][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592675429047545115:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:38:32.047603401 204565 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.047776573 204565 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:32.049012Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:38:32.059701569 204564 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.059796895 204564 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:32.060788Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:38:32.076724Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrkvgc797swf3cfjgvqmy3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTFmMTQzYzItMWYzMTE3MzAtZjZiMGQ5OTMtMTM5Y2E2NDM=, Pool ... column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-07T17:39:21.740711Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592675663465972969:2338] TxId: 281474976710662. Ctx: { TraceId: 01kecrnbn32vqass5rtytd39cz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGU5MzY2ZGEtODMyNTFkOTYtY2U5NzllZjQtNzNiNmVlYzU=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:39:21.745717Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecrnbn32vqass5rtytd39cz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGU5MzY2ZGEtODMyNTFkOTYtY2U5NzllZjQtNzNiNmVlYzU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592675663465972973:2351] 2026-01-07T17:39:21.746691Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecrnbn32vqass5rtytd39cz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGU5MzY2ZGEtODMyNTFkOTYtY2U5NzllZjQtNzNiNmVlYzU=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7592675663465972974:2352] 2026-01-07T17:39:21.747358Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrnbn32vqass5rtytd39cz", Forwarded response to sender actor, requestId: 4, sender: [9:7592675663465972870:2337], selfId: [9:7592675641991135694:2266], source: [9:7592675663465972871:2338] 2026-01-07T17:39:21.747920Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=OGU5MzY2ZGEtODMyNTFkOTYtY2U5NzllZjQtNzNiNmVlYzU=, workerId: [9:7592675663465972871:2338], local sessions count: 0 E0107 17:39:21.768259653 220179 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:39:21.768417708 220179 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:39:21.769770Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:39:21.798314Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7592675641991135465:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:21.798432Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:14.485269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:14.584590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:14.603560Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:14.604176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:14.604250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:14.891063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:14.891192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:14.953779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807552009179 != 1767807552009183 2026-01-07T17:39:14.962903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:15.008553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:15.094609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:15.378014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:15.391118Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:15.491771Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:39:15.491840Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:39:15.491950Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:39:15.590072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:39:15.590147Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:39:15.590605Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:39:15.590695Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:39:15.590954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:39:15.591067Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:39:15.591161Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:39:15.591382Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:39:15.592651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:15.593583Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:39:15.593635Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:39:15.621820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:15.622731Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:15.622933Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2768] 2026-01-07T17:39:15.623120Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:15.630929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:15.660185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:15.660533Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:15.660760Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-07T17:39:15.660920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:15.667893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:15.668380Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:15.668507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:15.670058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:15.670149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:15.670204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:15.670508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:15.670621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:15.670686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-07T17:39:15.671097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:15.671164Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:15.672282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:39:15.672340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:39:15.672375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:39:15.672602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:15.672685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:15.672752Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-07T17:39:15.683606Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:15.734187Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:15.734384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:15.734498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-07T17:39:15.734539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:15.734573Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:15.734629Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:15.734736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:15.734777Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:39:15.734850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:15.734900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, 
actorId: [1:928:2790] 2026-01-07T17:39:15.734933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:39:15.734954Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:39:15.734980Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:39:15.735100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2768], Recipient [1:891:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:15.735158Z ... Restarts 2026-01-07T17:39:22.433004Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:39:22.433031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:39:22.433059Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:22.433141Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:22.433163Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:39:22.433199Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:22.433224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:22.433258Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:22.433277Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:22.433308Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T17:39:22.444053Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:22.444139Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:22.444189Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:39:22.444290Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:22.446429Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-07T17:39:22.446497Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 281474976715661 ProcessProposeTransaction 2026-01-07T17:39:22.446566Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:1058:2877] DataReq marker# P0 2026-01-07T17:39:22.446703Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:1058:2877] Cookie# 0 txid# 
281474976715661 HANDLE TDataReq marker# P1 2026-01-07T17:39:22.446953Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:1058:2877] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2026-01-07T17:39:22.447177Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:1058:2877] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-07T17:39:22.447272Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:1058:2877] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2026-01-07T17:39:22.447586Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1058 RawX2: 8589937469 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020&\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\"\004\000\000\000\000\000\000\021=\013\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2026-01-07T17:39:22.447636Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:22.447749Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:22.447953Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:38:0] 2026-01-07T17:39:22.448046Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2026-01-07T17:39:22.448101Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-07T17:39:22.448141Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2026-01-07T17:39:22.448182Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:22.448219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:22.448266Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:39:22.448339Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715661] at 72075186224037888 2026-01-07T17:39:22.448382Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-07T17:39:22.448405Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:22.448428Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2026-01-07T17:39:22.448457Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2026-01-07T17:39:22.448484Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-07T17:39:22.448506Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2026-01-07T17:39:22.448526Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2026-01-07T17:39:22.448548Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-07T17:39:22.448594Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:1058:2877] for [0:281474976715661] at 72075186224037888 2026-01-07T17:39:22.448627Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-07T17:39:22.448698Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:22.448806Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2026-01-07T17:39:22.448876Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976715661 2026-01-07T17:39:22.448917Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2026-01-07T17:39:22.449091Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287942, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2026-01-07T17:39:22.449136Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2026-01-07T17:39:22.449256Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-07T17:39:22.449288Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-07T17:39:22.449356Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:22.449385Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:22.449451Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:22.449494Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:39:22.449543Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2026-01-07T17:39:22.449584Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-07T17:39:22.449627Z node 2 
:TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715661] at 72075186224037888 2026-01-07T17:39:22.449665Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-07T17:39:22.449703Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-07T17:39:22.449743Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2026-01-07T17:39:22.449778Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2026-01-07T17:39:22.450021Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-07T17:39:22.450058Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:39:22.450095Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-07T17:39:22.450130Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:22.450169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:22.450233Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:22.450771Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1064:2882], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T17:39:22.450813Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2026-01-07T17:38:57.358095Z :FallbackToSingleDb INFO: Random seed for debugging is 1767807537358063 2026-01-07T17:38:57.555238Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675559551750250:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:57.556312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:57.583220Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675560749850059:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:57.583386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:57.589099Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:38:57.608602Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : 
Created 2026-01-07T17:38:57.762280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:57.762261Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:57.819228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:57.819356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:57.823372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:57.823453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:57.827942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:57.851117Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:38:57.853127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:57.908301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:57.909435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675559551750208:2082] 1767807537551615 != 1767807537551618 2026-01-07T17:38:57.960712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001b93/r3tmp/yandex9tDwPn.tmp 2026-01-07T17:38:57.960751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001b93/r3tmp/yandex9tDwPn.tmp 2026-01-07T17:38:57.960969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001b93/r3tmp/yandex9tDwPn.tmp 2026-01-07T17:38:57.961093Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:57.979455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:57.985712Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:57.995486Z INFO: TTestServer started on Port 65266 GrpcPort 6832 PQClient connected to localhost:6832 2026-01-07T17:38:58.164588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:58.562678Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:58.605152Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:00.443925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675572436653451:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.443981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675572436653459:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.444090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.443299Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675573634752248:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.444814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675572436653466:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.444877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.445201Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.448499Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675573634752260:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.448611Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.450941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:00.453103Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675573634752264:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:00.545602Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675573634752267:2131] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:39:00.550615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T17:39:00.552695Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675573634752266:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:39:00.553325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675572436653465:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:39:00.620297Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675572436653562:2972] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:00.647271Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675573634752296:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:00.751973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:00.792460Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675572436653575:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:00.793355Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmIxNjc1ZDUtZTM2YTQxYjEtOTJhZjUwMWYtNTJjNTk2OWI=, ActorId: [1:7592675572436653449:2328], ActorState: ExecuteState, LegacyTraceId: 01kecrmq6v3vcmdawa4ank9b9m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 ... nd tx pending commits 2026-01-07T17:39:20.997517Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:20.997530Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.098393Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.098426Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.098443Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.098462Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.098473Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.201026Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.201058Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.201078Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.201096Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.201107Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.301326Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.301356Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.301370Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.301388Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.301398Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.401889Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 
2026-01-07T17:39:21.401926Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.401940Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.401958Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.401968Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.502267Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.502302Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.502317Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.502338Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.502353Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.602603Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.602637Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.602654Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.602690Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.602704Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2026-01-07T17:39:21.633868Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2026-01-07T17:39:21.633966Z :INFO: [/Root] [] [3a00386c-93f80dcd-2139aa6f-9c802a10] Open read subsessions to databases: { name: , endpoint: localhost:21347, path: /Root } 2026-01-07T17:39:21.634243Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Starting read session 2026-01-07T17:39:21.634284Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Starting single session 2026-01-07T17:39:21.634972Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-07T17:39:21.635043Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-07T17:39:21.635129Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] Reconnecting session to cluster in 0.000000s 2026-01-07T17:39:21.635336Z :ERROR: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:21347
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21347. 2026-01-07T17:39:21.635394Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-07T17:39:21.635426Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-07T17:39:21.635567Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:21347" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:21347
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21347. " } 2026-01-07T17:39:21.635860Z :NOTICE: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:39:21.635905Z :DEBUG: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:21347" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:21347
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21347. " } 2026-01-07T17:39:21.636028Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Closing read session. Close timeout: 0.010000s 2026-01-07T17:39:21.636066Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:39:21.636118Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:39:21.636158Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Closing read session. Close timeout: 0.000000s 2026-01-07T17:39:21.636186Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:39:21.636221Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:39:21.636259Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Closing read session. Close timeout: 0.000000s 2026-01-07T17:39:21.636292Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:39:21.636337Z :INFO: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:39:21.636417Z :NOTICE: [/Root] [/Root] [3b042230-53a1f695-4ae75639-d3508f91] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:39:21.703006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.703051Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.703068Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.703089Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.703103Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.803276Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.803318Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.803335Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.803354Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.803367Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:21.903648Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:21.903680Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.903696Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:21.903717Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:21.903728Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> DataShardTxOrder::ZigZag_oo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 28048, MsgBus: 15627 2026-01-07T17:38:28.837191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675435171457882:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:28.858350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:28.913677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:38:29.235021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:29.283148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:29.283278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:29.285031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:29.376957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:29.381208Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675435171457842:2081] 1767807508832686 != 1767807508832689 2026-01-07T17:38:29.469655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:29.469687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:29.469699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:29.469789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:29.504331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:29.869214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:30.166511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:38:30.166879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:38:30.168666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:30.173564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:38:30.176125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807510224, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:38:30.181482Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:38:30.181550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-07T17:38:30.181799Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592675439466425663:2245] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:38:30.182209Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675435171457810:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:30.182328Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675435171457813:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:30.182383Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675435171457816:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:30.182657Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675439466425622:2223][/Root] Path was updated to new version: owner# [1:7592675435171458141:2121], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.182999Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592675439466425663:2245] Ack update: ack to# [1:7592675435171458197:2147], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:38:30.183274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-07T17:38:30.185351Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675439466425829:2289][/Root] Path was updated to new version: owner# [1:7592675439466425823:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.185630Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675439466425828:2288][/Root] Path was updated to new version: owner# [1:7592675439466425822:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 
AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:30.189358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:38:32.428132Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:32.430600Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675452351327874:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592675435171458141:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:32.447715Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrkspf0mqym0wc3r2x6dw0", Request has 18444976266197.103936s seconds to be completed 2026-01-07T17:38:32.451780Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrkspf0mqym0wc3r2x6dw0", Created new session, sessionId: ydb://session/3?node_id=1&id=MzlkNWQ3M2MtN2NjYmZlMDgtYTZlOTQ2MTMtNmVjODkzMGY=, workerId: [1:7592675452351327901:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:38:32.451996Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrkspf0mqym0wc3r2x6dw0 2026-01-07T17:38:32.452054Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:38:32.452079Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:38:32.452108Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:32.455045Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675452351327903:2497][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592675435171458141:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:32.456875Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675452351327904:2498][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592675435171458141:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:32.457728Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002dde/r3tmp/spilling-tmp-runner/node_1_38f6a902-283ff24-106bf9aa-993d2239, actor: [1:7592675452351327882:2308] E0107 17:38:32.486636825 204903 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.486842135 204903 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0107 17:38:32.489776019 204903 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:32.494605640 204903 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:32.498997Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002dde/r3tmp/spilling-tmp-runner 2026-01-07T17:38:32.502903Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:38:32.505522Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrkvxrefw1f7rcvfqqwy78, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzlkNWQ3M2MtN2NjYmZlMDgtYTZlOTQ2MTMtNmVjODkzMGY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592675452351327901:2326] 2026-01-07T17:38:32.505561Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7592675452351327920:2500] 2026-01-07T17:38:32.509839Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675452351327923:2501][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7592675435171458141:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShar ... catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-07T17:39:23.554689Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592675670119875050:2338] TxId: 281474976710662. 
Ctx: { TraceId: 01kecrndd22remvhxh7zcb1mhm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE1YTkzYmMtNThkMjkzNDAtOWUyYmQ3ODEtYzRhYWM4ODE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# 2026-01-07T17:39:23.555107Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7592675648645037541:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:23.555276Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2026-01-07T17:39:23.560139Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecrndd22remvhxh7zcb1mhm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE1YTkzYmMtNThkMjkzNDAtOWUyYmQ3ODEtYzRhYWM4ODE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592675670119875054:2351] 2026-01-07T17:39:23.561003Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecrndd22remvhxh7zcb1mhm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE1YTkzYmMtNThkMjkzNDAtOWUyYmQ3ODEtYzRhYWM4ODE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592675670119875055:2352] 2026-01-07T17:39:23.561602Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrndd22remvhxh7zcb1mhm", Forwarded response to sender actor, requestId: 4, sender: [9:7592675670119874951:2337], selfId: [9:7592675648645037771:2266], source: [9:7592675670119874952:2338] 2026-01-07T17:39:23.562189Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MzE1YTkzYmMtNThkMjkzNDAtOWUyYmQ3ODEtYzRhYWM4ODE=, workerId: [9:7592675670119874952:2338], local sessions count: 0 E0107 17:39:23.639217678 220807 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:39:23.639417084 220807 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:39:23.651270Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin >> DataShardTxOrder::RandomPointsAndRanges >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> DataShardTxOrder::RandomPoints_DelayData |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:20.026363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:20.120461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:20.137680Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:20.138127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:20.138224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:20.404597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:20.404757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:20.478907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807557535095 != 1767807557535099 2026-01-07T17:39:20.487901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:20.532501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:20.626694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:20.914681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:20.917337Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:39:20.917405Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-07T17:39:20.928520Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:21.024154Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-07T17:39:21.024294Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-07T17:39:21.024666Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-07T17:39:21.025287Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-07T17:39:21.025355Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-07T17:39:21.026824Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-07T17:39:21.026923Z node 1 :TX_COORDINATOR 
DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-07T17:39:21.026974Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-07T17:39:21.027020Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-07T17:39:21.032474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:21.073316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:21.074281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:21.074547Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:884:2765] 2026-01-07T17:39:21.074766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:21.112419Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:21.113265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:21.113435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:21.115083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:21.115134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:21.115178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:21.115437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:21.115557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:21.115639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2765] in generation 1 2026-01-07T17:39:21.115971Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:21.144164Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:21.144329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:21.144414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2775] 2026-01-07T17:39:21.144442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:21.144465Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:21.144497Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:21.144747Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:884:2765], Recipient [1:884:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.144824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.145124Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:21.145195Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:21.145279Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:21.145316Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:21.145371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:21.145406Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:21.145439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:21.145461Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:21.145493Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:21.145584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:891:2769], Recipient [1:884:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.145617Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.145667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:883:2764], serverId# [1:891:2769], sessionId# [0:0:0] 2026-01-07T17:39:21.146051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:891:2769] 2026-01-07T17:39:21.146104Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:21.146200Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:21.146383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:21.146433Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:21.146504Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:21.146558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for 
[0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:21.146589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:21.146624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:21.146677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:21.146933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplet ... shard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1592:3263], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-07T17:39:25.549791Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:25.549863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:25.549956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:25.549992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:25.550024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:25.550058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:25.550107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-07T17:39:25.550142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:25.550164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:25.550192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:25.550223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:25.550346Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:25.550612Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, 
LocalPathId: 38] 2026-01-07T17:39:25.550667Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4500/18446744073709551615 2026-01-07T17:39:25.550717Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1592:3263], 0} after executionsCount# 1 2026-01-07T17:39:25.550765Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1592:3263], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:25.550836Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1592:3263], 0} finished in read 2026-01-07T17:39:25.550902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:25.550927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:25.550951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:25.550979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:25.551020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:25.551042Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:25.551068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T17:39:25.551118Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:25.551228Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:25.552410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1592:3263], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:25.552482Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2026-01-07T17:39:25.702561Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2026-01-07T17:39:25.702679Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 5500 in 0.499900s at 5.450000s 2026-01-07T17:39:25.705804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1614:3280], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-07T17:39:25.706028Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:25.706115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:25.706203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:39:25.706247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:25.706288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:25.706327Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:25.706380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-07T17:39:25.706422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:39:25.706446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:25.706469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:25.706492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:25.706609Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:25.706989Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:39:25.707051Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-07T17:39:25.707097Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1614:3280], 0} after executionsCount# 1 2026-01-07T17:39:25.707149Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1614:3280], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:25.707225Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1614:3280], 0} finished in read 2026-01-07T17:39:25.707308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:39:25.707339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 
72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:25.707364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:25.707389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:25.707440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:39:25.707465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:25.707491Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-07T17:39:25.707528Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:25.707629Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:25.707852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [1:70:2117], Recipient [1:1535:3228]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2026-01-07T17:39:25.709093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1614:3280], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:25.709156Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4231, MsgBus: 16066 2026-01-07T17:38:27.794814Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675432111546638:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:27.815968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:28.410456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:28.410572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:28.450243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:28.509213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-07T17:38:28.509444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675432111546598:2081] 1767807507781448 != 1767807507781451 2026-01-07T17:38:28.523987Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:28.718252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:28.761478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:38:28.761503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:38:28.761513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:38:28.761591Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:28.977091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:29.330758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:38:29.331116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:38:29.332899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:29.333848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:38:29.336192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767807509384, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:38:29.342301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:38:29.342369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-07T17:38:29.342689Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592675436406514429:2255] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:38:29.342959Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675432111546566:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:29.343035Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675432111546569:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# 
false 2026-01-07T17:38:29.343054Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592675432111546572:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:38:29.343319Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675436406514369:2217][/Root] Path was updated to new version: owner# [1:7592675432111546739:2105], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:29.343393Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592675436406514429:2255] Ack update: ack to# [1:7592675436406514260:2157], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:38:29.343627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-07T17:38:29.344283Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675436406514543:2288][/Root] Path was updated to new version: owner# [1:7592675436406514537:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:29.344471Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592675436406514550:2289][/Root] Path was updated to new version: owner# [1:7592675436406514538:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.949682Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:31.952121Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002ddf/r3tmp/spilling-tmp-runner/node_1_865c03a2-81f6d549-7ab43fa3-a5169849, actor: [1:7592675449291416628:2307] 2026-01-07T17:38:31.952313Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002ddf/r3tmp/spilling-tmp-runner 2026-01-07T17:38:31.954642Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrkrw13pw914vh7qszb2nm", Request has 18444976266197.597008s seconds to be completed 2026-01-07T17:38:31.957955Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675449291416651:2499][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# 
[1:7592675432111546739:2105], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.958988Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrkrw13pw914vh7qszb2nm", Created new session, sessionId: ydb://session/3?node_id=1&id=N2M4NTBlMDctOWYyZGI0YjAtMTVkNzVkMTktYzk3ODdlOGE=, workerId: [1:7592675449291416659:2328], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:38:31.959218Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrkrw13pw914vh7qszb2nm 2026-01-07T17:38:31.959320Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:38:31.959340Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:38:31.959368Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:38:31.971440Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675449291416660:2501][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592675432111546739:2105], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:38:31.972234Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675449291416665:2502][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592675432111546739:2105], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:38:31.973699794 204614 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:31.973881131 204614 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0107 17:38:31.975909345 204614 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:31.976011824 204614 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:31.982364Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:38:31.992454355 204613 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:38:31.992622053 204613 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:38:31.997727Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:38:31.997905Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrkvdxba6dfgvd9e0ye9j4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2M4NTBlMDctOWYyZGI0YjAtMTVkNzVkMTktYzk3ODdlOGE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592675449291416659:2328] 2026-01-07T17:38:31.997927Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7592675449291416677:2504] 2026-01-07T17:38:32.001920Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592675453586383976:2505][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7592675432111546739:2105], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:39:25.109712Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592675677877247298:2339] TxId: 281474976715662. Ctx: { TraceId: 01kecrnexvfpn8nxqy5m4e8kk0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YWQyNTAzMzEtYTk4ODQ3MTktY2U5ZTI2YWYtMWI3MWY2ODI=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { 
type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:39:25.114892Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecrnexvfpn8nxqy5m4e8kk0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YWQyNTAzMzEtYTk4ODQ3MTktY2U5ZTI2YWYtMWI3MWY2ODI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592675677877247302:2352] 2026-01-07T17:39:25.116000Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecrnexvfpn8nxqy5m4e8kk0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YWQyNTAzMzEtYTk4ODQ3MTktY2U5ZTI2YWYtMWI3MWY2ODI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592675677877247303:2353] 2026-01-07T17:39:25.116776Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrnexvfpn8nxqy5m4e8kk0", Forwarded response to sender actor, requestId: 4, sender: [9:7592675673582279902:2338], selfId: [9:7592675652107442686:2244], source: [9:7592675673582279903:2339] 2026-01-07T17:39:25.117519Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=YWQyNTAzMzEtYTk4ODQ3MTktY2U5ZTI2YWYtMWI3MWY2ODI=, workerId: [9:7592675673582279903:2339], local sessions count: 0 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2026-01-07T17:39:21.444063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:21.444113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:21.444511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:21.456333Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:21.456687Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:21.456991Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:21.466821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:21.509494Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:21.510087Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:21.511571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:21.511635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:21.511689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:21.512053Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:21.512246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:21.512303Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:21.593321Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:21.619170Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:21.619405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:21.619530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:21.619612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:21.619665Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:21.619709Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.619994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.620084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.620392Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:21.620490Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:21.620618Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.620660Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:21.620720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:21.620760Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:21.620807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:21.620840Z 
node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:21.620883Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:21.620986Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.621022Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.621085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:21.626949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:21.627005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:21.627095Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:21.627220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:21.627277Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:21.627314Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:21.627354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:21.627380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:21.627410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:21.627432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.627640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:21.627669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:21.627698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:21.627722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.627768Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:21.627790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:21.627818Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:21.627846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.627861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:21.639739Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:21.639794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.639825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.639896Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:21.639961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:21.640477Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.640538Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.640597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:21.640756Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:21.640790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:21.640917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.640983Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-07T17:39:21.641016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:21.641050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:21.650097Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:21.650178Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.650400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.650440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.650499Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.650538Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:21.650573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:21.650611Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-07T17:39:21.650649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-07T17:39:21. ... 7184 consumer 9437184 txId 151 2026-01-07T17:39:26.690476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:26.690506Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.690555Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:39:26.690638Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.690671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:134] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.690710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.690766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:39:26.690807Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.690921Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.690951Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.690974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.691017Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.691063Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:39:26.691086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.691196Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.691222Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.691252Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.691279Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.691301Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.691338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.691376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:39:26.691420Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.691535Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.691557Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:26.691594Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.691619Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.691675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.691727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:39:26.691756Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.691861Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.691891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.691923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.691975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:39:26.692003Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.692111Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.692147Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.692211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.692259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:39:26.692283Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.692403Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:26.692434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:26.692465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:39:26.692504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:26.692528Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:26.692794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:39:26.692835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.692866Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-07T17:39:26.692959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:26.692988Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693016Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:39:26.693212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:39:26.693257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693288Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-07T17:39:26.693395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:39:26.693422Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693450Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-07T17:39:26.693542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:39:26.693574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693598Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-07T17:39:26.693689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:39:26.693725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693774Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-07T17:39:26.693885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:39:26.693923Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.693959Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-07T17:39:26.694049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:240:2232]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:26.694075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:26.694096Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2026-01-07T17:39:00.114208Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1767807540114175 2026-01-07T17:39:00.381675Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675572458178879:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:00.388013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:00.446064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:00.449890Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: 
Direct read cache: : Created 2026-01-07T17:39:00.453366Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675573231929675:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:00.453420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:00.499172Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:00.661013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:00.685302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:00.716229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:00.716368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:00.720387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:00.720499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:00.761552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:00.776208Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:00.776425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:00.782541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:00.877838Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:00.908665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:00.955125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001bba/r3tmp/yandexCqqNo1.tmp 2026-01-07T17:39:00.955159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001bba/r3tmp/yandexCqqNo1.tmp 2026-01-07T17:39:00.955339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001bba/r3tmp/yandexCqqNo1.tmp 2026-01-07T17:39:00.955436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:00.994837Z INFO: TTestServer started on Port 4635 GrpcPort 22224 PQClient 
connected to localhost:22224 2026-01-07T17:39:01.180646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:01.403014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:01.481361Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:03.672109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675586116831896:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:03.672121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675586116831885:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:03.672228Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:03.672536Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675586116831900:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:03.672648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:03.678242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:03.694863Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675586116831899:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:39:03.839226Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675586116831929:2140] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:04.058417Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675585343082140:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:04.058898Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=M2YyOTJlZWYtMWY4NjMwZGEtOWIzODkxNmQtNDU2MzkyOTc=, ActorId: [1:7592675585343082098:2328], ActorState: ExecuteState, LegacyTraceId: 01kecrmtfe0m5hgp3rqra1ry9c, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:04.060336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:04.061087Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675586116831944:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:04.061391Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:04.061496Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YjViNmY3NTEtZjgwMTEzMDEtMzlkMDE4MTctYjM0NjRhYzc=, ActorId: [2:7592675586116831882:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmtbpeq0z5dh9ed8sfx4k, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:04.061825Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you ... 
: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.322133Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.322144Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.422405Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.422457Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.422473Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.422493Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.422505Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.522738Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.522767Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.522782Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.522803Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.522814Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.623082Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.623107Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.623117Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.623133Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.623142Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.723437Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.723462Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.723474Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.723487Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.723495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.823783Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.823813Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.823825Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.823840Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.823849Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:25.924163Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:25.924199Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.924215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:25.924246Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:25.924258Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.024522Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.024560Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.024577Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.024599Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.024627Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.124824Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.124852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.124863Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.124878Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.124887Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.225218Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.225255Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.225273Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.225295Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.225308Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.325562Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.325600Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.325616Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.325649Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.325660Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.425916Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.425951Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.425966Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.425989Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.426000Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.526259Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.526293Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.526304Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.526322Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.526335Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.626627Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.626666Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.626681Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.626703Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.626724Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.726968Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.727002Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.727018Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.727038Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.727061Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:26.827297Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:26.827327Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.827345Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:26.827366Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:26.827378Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:18.301786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:18.411562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:18.431169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:18.431603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:18.431671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:18.702911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:18.703050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:18.778789Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807556023408 != 1767807556023412 2026-01-07T17:39:18.789534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:18.834164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:18.931136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:19.203472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:19.216827Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:19.318911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:19.362865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:19.363821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:19.364102Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:19.364352Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:19.404698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:19.405445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:19.405570Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:19.407227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:19.407319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:19.407373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:19.407713Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:19.407838Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:19.407911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:19.418706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:19.440590Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:19.440763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:19.440859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:19.440896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:19.440939Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:19.440970Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:19.441172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:19.441223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:19.441484Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:19.441563Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:19.441653Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:19.441679Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:19.441723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:19.441754Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:19.441787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:19.441813Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:19.441847Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:19.441920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:19.441962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:19.442002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:19.442377Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:19.442425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:19.442523Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:19.442738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:19.442799Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:19.442866Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:19.442906Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:19.442929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:19.442954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:19.442991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:19.443202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:19.443224Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:19.443257Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:19.443277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:19.443326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:19.443350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:19.443375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:19.443394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:19.443409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:19.444443Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:19.444473Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:19.455059Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... :1271:3058] 2026-01-07T17:39:27.307349Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1276:3063], CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-07T17:39:27.307393Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1276:3063], CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-07T17:39:27.307460Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1277:3057], Recipient [2:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:27.307498Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:39:27.307624Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1272:3059] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 303 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 206 FinishTimeMs: 1767807567306 InputRows: 1 InputBytes: 6 ComputeCpuTimeUs: 158 BuildCpuTimeUs: 48 WaitOutputTimeUs: 332 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767807567306 CreateTimeMs: 1767807567301 UpdateTimeMs: 1767807567306 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:27.307643Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1272:3059] 2026-01-07T17:39:27.307670Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1266:3039] TxId: 281474976715667. 
Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1276:3063], CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.307694Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1276:3063], CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.307758Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1278:3058], Recipient [2:969:2824]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:27.307785Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } 2026-01-07T17:39:27.307882Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1273:3060] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 207 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 122 FinishTimeMs: 1767807567306 InputRows: 1 InputBytes: 6 ComputeCpuTimeUs: 89 BuildCpuTimeUs: 33 WaitOutputTimeUs: 429 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767807567306 CreateTimeMs: 1767807567301 UpdateTimeMs: 1767807567306 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:27.307911Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1273:3060] 2026-01-07T17:39:27.307944Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1276:3063], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.307975Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1276:3063], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.308058Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1274:3061] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 337 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 247 FinishTimeMs: 1767807567306 InputRows: 2 InputBytes: 12 ComputeCpuTimeUs: 191 BuildCpuTimeUs: 56 WaitOutputTimeUs: 474 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767807567306 CreateTimeMs: 1767807567301 UpdateTimeMs: 1767807567306 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:27.308089Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1274:3061] 2026-01-07T17:39:27.308120Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1276:3063], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.308150Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1276:3063], CA [2:1275:3062], trace_id# 2026-01-07T17:39:27.308232Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1275:3062] TaskId# 6 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 169 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 96 FinishTimeMs: 1767807567306 InputRows: 2 InputBytes: 8 ComputeCpuTimeUs: 51 BuildCpuTimeUs: 45 WaitOutputTimeUs: 481 HostName: "ghrun-z2c6jslqle" NodeId: 2 CreateTimeMs: 1767807567301 UpdateTimeMs: 1767807567306 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:27.308257Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1275:3062] 2026-01-07T17:39:27.308286Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1276:3063], trace_id# 2026-01-07T17:39:27.308314Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1266:3039] TxId: 281474976715667. 
Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1276:3063], trace_id# 2026-01-07T17:39:27.308464Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1276:3063] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 177 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 101 FinishTimeMs: 1767807567306 InputRows: 2 InputBytes: 8 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 28 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767807567306 CreateTimeMs: 1767807567301 UpdateTimeMs: 1767807567306 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:27.308496Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1276:3063] 2026-01-07T17:39:27.308580Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:27.308657Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1266:3039] TxId: 281474976715667. Ctx: { TraceId: 01kecrnh4w5216zdh416b5d77c, Database: , SessionId: ydb://session/3?node_id=2&id=MmRiZDg5NzktYjRlMTRhZS1kODUzYjUzMi04ODI5NjQ3YQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002092s ReadRows: 2 ReadBytes: 16 RequestUnits# 2 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin >> DataShardTxOrder::ZigZag >> DataShardOutOfOrder::TestOutOfOrderLockLost ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2026-01-07T17:39:04.392400Z :BasicWriteSession INFO: Random seed for debugging is 1767807544392367 2026-01-07T17:39:04.649325Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675590434799152:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.650687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.687725Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675587395581830:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.688075Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.688697Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.697063Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.846802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.893850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.929715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.929817Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.933816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.933885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.941820Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:04.941959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.947824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:05.036979Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:05.052494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:05.067198Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:05.140895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001a94/r3tmp/yandexx26gdr.tmp 2026-01-07T17:39:05.140941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001a94/r3tmp/yandexx26gdr.tmp 2026-01-07T17:39:05.141107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001a94/r3tmp/yandexx26gdr.tmp 2026-01-07T17:39:05.141217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:05.179644Z INFO: TTestServer started on Port 23761 GrpcPort 62447 PQClient connected to localhost:62447 2026-01-07T17:39:05.350905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:05.663030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:05.691694Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:07.740691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675603319702364:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.740728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675603319702375:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.740830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.741626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675603319702379:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.741700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.750896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:07.756280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675603319702412:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.756794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.759332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675603319702422:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.759430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.770943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675603319702378:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:39:08.101378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:08.102339Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675603319702456:2993] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:08.114500Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675600280484042:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:08.115051Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NDQzZWFjNGMtZmVkMjJmYjUtZDJmMGM3YWYtMzAyZDU4Nw==, ActorId: [2:7592675600280484025:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmye1egvnvpmssn61q5v7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:08.125981Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675607614669810:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:08.126419Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OTBhN2UzMDgtMjEzYmU0MTQtZmNiMmRiNTEtMjI3ODQyNjY=, ActorId: [1:7592675603319702361:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmyag7rsrx9y7bbp9ebys, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:08.141870Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { messag ... G: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.264806Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.264820Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.264837Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.264848Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:27.326012Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:67: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2026-01-07T17:39:27.326314Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675687282941580:2450] connected; active server actors: 1 2026-01-07T17:39:27.326374Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:80: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2026-01-07T17:39:27.326392Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) Update the table 2026-01-07T17:39:27.326634Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675687282941580:2450] disconnected. 
2026-01-07T17:39:27.326660Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675687282941580:2450] disconnected; active server actors: 1 2026-01-07T17:39:27.326678Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675687282941580:2450] disconnected no session 2026-01-07T17:39:27.365079Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.365110Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.365124Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.365143Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.365153Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:27.406503Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-07T17:39:27.406547Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2026-01-07T17:39:27.406563Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7592675687282941543:2450] (SourceId=src, PreferedPartition=(NULL)) Start idle 2026-01-07T17:39:27.406590Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:39:27.407450Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [3:7592675687282941602:2450], now have 1 active actors on pipe 2026-01-07T17:39:27.407614Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2026-01-07T17:39:27.446372Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 2026-01-07T17:39:27.447119Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767807567447 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:39:27.447224Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|8ab32661-9865202a-1a5af4e6-efe8bda3_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-07T17:39:27.447403Z :INFO: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session: close. 
Timeout = 0 ms 2026-01-07T17:39:27.447454Z :INFO: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session will now close 2026-01-07T17:39:27.447490Z :DEBUG: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session: aborting 2026-01-07T17:39:27.445060Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-07T17:39:27.445109Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-07T17:39:27.445203Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-01-07T17:39:27.445240Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.445257Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:39:27.445277Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:39:27.445289Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.445311Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:39:27.445338Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:39:27.445352Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:39:27.445378Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:27.445416Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-07T17:39:27.445457Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:39:27.446008Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-07T17:39:27.446031Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-07T17:39:27.446092Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:39:27.448187Z :INFO: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:39:27.448206Z :DEBUG: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session is aborting and will not restart 2026-01-07T17:39:27.448268Z :DEBUG: [] MessageGroupId [src] SessionId [src|8ab32661-9865202a-1a5af4e6-efe8bda3_0] Write session: destroy 2026-01-07T17:39:27.448456Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 grpc read done: success: 0 data: 2026-01-07T17:39:27.448475Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 grpc read failed 2026-01-07T17:39:27.448500Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 grpc closed 2026-01-07T17:39:27.448513Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|8ab32661-9865202a-1a5af4e6-efe8bda3_0 is DEAD 2026-01-07T17:39:27.449746Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592675687282941602:2450] destroyed 2026-01-07T17:39:27.449789Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:39:27.449814Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.449827Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.449839Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.449326Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:39:27.449854Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.449865Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2026-01-07T17:39:27.465433Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.465488Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.465504Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.465523Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.465533Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:27.565788Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:27.565817Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.565831Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:27.565848Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:27.565859Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> DataShardTxOrder::ImmediateBetweenOnline >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite |87.4%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> DataShardTxOrder::RandomPoints_DelayRS >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system [GOOD] |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin [GOOD] |87.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:21.597708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:21.702857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:21.719279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:21.719859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:21.719932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:21.970725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:21.970878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:22.045868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807559329939 != 1767807559329943 2026-01-07T17:39:22.066959Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:22.114635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:22.214028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:22.495759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:22.508897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:22.613734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:22.941988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:23.154427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1114:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:23.154569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1125:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:23.154668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:23.155446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:23.155634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:23.160124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:23.319363Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1128:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:23.398349Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1186:2962] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets ... sending immediate upsert ... waiting for immediate propose ... immediate upsert is blocked 2026-01-07T17:39:24.409617Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2026-01-07T17:39:24.420356Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:1370:3015], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1261:3015]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1370:3015]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2026-01-07T17:39:24.436202Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1364:3015], SessionActorId: [1:1261:3015], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1261:3015]. 2026-01-07T17:39:24.436741Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NzI4NWQ0ODMtOWIyMzM4MDAtODhlMmI0ZTQtNDQ2NjQ0MDI=, ActorId: [1:1261:3015], ActorState: ExecuteState, LegacyTraceId: 01kecrneh778316m2ta75fx8d7, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1365:3015] from: [1:1364:3015] trace_id# 2026-01-07T17:39:24.437432Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:1365:3015] TxId: 281474976715665. Ctx: { TraceId: 01kecrneh778316m2ta75fx8d7, Database: , SessionId: ydb://session/3?node_id=1&id=NzI4NWQ0ODMtOWIyMzM4MDAtODhlMmI0ZTQtNDQ2NjQ0MDI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } trace_id# 2026-01-07T17:39:24.440696Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzI4NWQ0ODMtOWIyMzM4MDAtODhlMmI0ZTQtNDQ2NjQ0MDI=, ActorId: [1:1261:3015], ActorState: ExecuteState, LegacyTraceId: 01kecrneh778316m2ta75fx8d7, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`." issue_code: 2006 severity: 1 issues { message: "Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting" issue_code: 2006 severity: 1 } } trace_id# 2026-01-07T17:39:24.441956Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [1:1337:3017], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1263:3017]TEvDeliveryProblem was received from tablet: 72075186224037889 2026-01-07T17:39:24.442068Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1329:3017], SessionActorId: [1:1263:3017], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1263:3017]. 2026-01-07T17:39:24.442812Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NTE5NGYwNGUtMTk1ZmZlYTEtNGExNzNkYjYtZmUyNjNlOA==, ActorId: [1:1263:3017], ActorState: ExecuteState, LegacyTraceId: 01kecrneea6mqdw8jct2c113sb, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1330:3017] from: [1:1329:3017] trace_id# 2026-01-07T17:39:24.443460Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:1330:3017] TxId: 281474976715664. Ctx: { TraceId: 01kecrneea6mqdw8jct2c113sb, Database: , SessionId: ydb://session/3?node_id=1&id=NTE5NGYwNGUtMTk1ZmZlYTEtNGExNzNkYjYtZmUyNjNlOA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } trace_id# 2026-01-07T17:39:24.444025Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTE5NGYwNGUtMTk1ZmZlYTEtNGExNzNkYjYtZmUyNjNlOA==, ActorId: [1:1263:3017], ActorState: ExecuteState, LegacyTraceId: 01kecrneea6mqdw8jct2c113sb, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } } ... waiting for SysViewsRoster update finished 2026-01-07T17:39:27.964209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:27.969669Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:27.972740Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:27.972996Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:27.973144Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:28.203420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:28.203531Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:28.250013Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807565310796 != 1767807565310800 2026-01-07T17:39:28.253996Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:28.299192Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:28.367572Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:28.647761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:28.660862Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:28.771654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:28.979468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-07T17:39:29.188421Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:29.188537Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:29.189240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:29.190184Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:29.190347Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:29.194113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:29.379375Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:29.415615Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1097:2913] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet 2026-01-07T17:39:30.172040Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1213:2954], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1159:2954]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2026-01-07T17:39:26.615101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:26.615149Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:26.615474Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:26.624597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:26.624963Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:26.625199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:26.634600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:26.668061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:26.668504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:26.669762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:26.669879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:26.669957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:26.670226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:26.670382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:26.670435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:26.729109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:26.753116Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 
2026-01-07T17:39:26.753266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:26.753338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:26.753376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:26.753408Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:26.753435Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.753548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.753601Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.753823Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:26.753909Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:26.754004Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.754043Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:26.754082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:26.754107Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:26.754139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:26.754162Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:26.754192Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:26.754261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.754292Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.754346Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:26.760024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:26.760092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:26.760172Z node 1 :TX_DATASHARD 
DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:26.760304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:26.760370Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:26.760409Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:26.760450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:26.760476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:26.760503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:26.760529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.760737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:26.760763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:26.760794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:26.760813Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.760862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:26.760880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:26.760901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:26.760921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.760966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:26.772759Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:26.772811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.772837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.772879Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:26.772946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:26.773431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.773489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.773551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:26.773692Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:26.773727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:26.773879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.773924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:26.773962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:26.773996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:26.777006Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:26.777055Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.777202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.777226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.777266Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.777308Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:26.777334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:26.777363Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:26.777393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
17:39:31.274973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:39:31.274999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2026-01-07T17:39:31.275059Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:31.275081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-07T17:39:31.275115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:31.275163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:31.275211Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:31.275308Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:31.275330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-07T17:39:31.275375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:31.275434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:31.275455Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:31.275612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-07T17:39:31.275643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:31.275687Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-07T17:39:31.275789Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-07T17:39:31.275814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:31.275837Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-07T17:39:31.275988Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:31.276035Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:31.276072Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:39:31.276135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:240:2232], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:39:31.276161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:31.276192Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-07T17:39:31.276255Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:39:31.276315Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-07T17:39:31.276377Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:31.276480Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:31.276504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:31.276534Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:39:31.276598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:31.276639Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:31.276682Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-07T17:39:31.276712Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:31.276779Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-07T17:39:31.276809Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-07T17:39:31.276838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:31.276865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-07T17:39:31.276904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-07T17:39:31.276950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit 
BlockFailPoint 2026-01-07T17:39:31.276974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:31.276995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-07T17:39:31.277013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-07T17:39:31.277033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-07T17:39:31.277505Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-07T17:39:31.277555Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:31.277597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-07T17:39:31.277621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-07T17:39:31.277646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-07T17:39:31.277669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:31.277834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-07T17:39:31.277851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-07T17:39:31.277895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-07T17:39:31.277924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-07T17:39:31.277967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:31.277996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-07T17:39:31.278020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-07T17:39:31.278045Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:31.278068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:39:31.278104Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-07T17:39:31.278127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-07T17:39:31.291497Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:31.291555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:31.291603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:31.291667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:31.291704Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:31.291902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:31.291932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:31.291957Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query >> Cdc::AddIndex >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin [GOOD] >> DataShardTxOrder::ReadWriteReorder >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy [GOOD] Test command err: 2026-01-07T17:39:22.864145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:22.864203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:22.864700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:22.876619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:22.877010Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:22.877260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:22.886888Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:22.920291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:22.920788Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:22.922178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:22.922252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:22.922294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:22.922591Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:22.922742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:22.922792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:22.989977Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:23.031864Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:23.032055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:23.032174Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:23.032229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:23.032268Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:23.032303Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:23.032457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:23.032521Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:23.032838Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:23.032988Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:23.033134Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:23.033184Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:23.033236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:23.033275Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:23.033329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:23.033364Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:23.033406Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:23.033496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:23.033556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:23.033608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:23.040570Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:23.040648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:23.040760Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:23.040946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:23.041030Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:23.041087Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:23.041142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:23.041181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:23.041219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:23.041258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:23.041603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:23.041642Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:23.041687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:23.041728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:23.041799Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:23.041829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:23.041865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:23.041896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:23.041924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:23.053920Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:23.053986Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:23.054013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:23.054056Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:23.054104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:23.054476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:23.054519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:23.054551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:23.054658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:23.054683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:23.054787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:23.054825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:23.054856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:23.054891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:23.057635Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:23.057698Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:23.057865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:23.057895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:23.057934Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:23.057969Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:23.058001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:23.058041Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:23.058077Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-07T17:39:31.097271Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:31.097391Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:348:2315], Recipient [2:348:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:31.097421Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:31.097467Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-07T17:39:31.097491Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:31.097510Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-07T17:39:31.097536Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-01-07T17:39:31.097558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-01-07T17:39:31.097581Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.097625Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-01-07T17:39:31.097653Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-01-07T17:39:31.097669Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-01-07T17:39:31.098120Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-01-07T17:39:31.098147Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098162Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-01-07T17:39:31.098176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-01-07T17:39:31.098190Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-01-07T17:39:31.098211Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098223Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-01-07T17:39:31.098235Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:31.098247Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-01-07T17:39:31.098290Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-01-07T17:39:31.098347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-01-07T17:39:31.098379Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-01-07T17:39:31.098417Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098437Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:31.098456Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-01-07T17:39:31.098474Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-01-07T17:39:31.098514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-01-07T17:39:31.098552Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-01-07T17:39:31.098570Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-01-07T17:39:31.098621Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-01-07T17:39:31.098671Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-01-07T17:39:31.098685Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-01-07T17:39:31.098703Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098715Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-01-07T17:39:31.098726Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-01-07T17:39:31.098741Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-01-07T17:39:31.098755Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098766Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-01-07T17:39:31.098778Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-01-07T17:39:31.098791Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-01-07T17:39:31.098815Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.098830Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-01-07T17:39:31.098852Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-01-07T17:39:31.098869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-01-07T17:39:31.099095Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-01-07T17:39:31.099125Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:31.099187Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.099206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-01-07T17:39:31.099219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-01-07T17:39:31.099239Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-01-07T17:39:31.099346Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-01-07T17:39:31.099362Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-01-07T17:39:31.099377Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-01-07T17:39:31.099393Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-01-07T17:39:31.099409Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-07T17:39:31.099433Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-01-07T17:39:31.099448Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-01-07T17:39:31.099466Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:31.099482Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-07T17:39:31.099511Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-07T17:39:31.099534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:39:31.112970Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-07T17:39:31.113027Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-07T17:39:31.113074Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:31.113112Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-07T17:39:31.113160Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-07T17:39:31.113202Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:39:31.113499Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-07T17:39:31.113534Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-07T17:39:31.113565Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:31.113589Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-07T17:39:31.113620Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-07T17:39:31.113666Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardTxOrder::ReadWriteReorder [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> TxUsage::WriteToTopic_Demo_47_Table >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2026-01-07T17:39:33.527383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:33.527427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:33.527728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:33.537477Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:33.537749Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:33.537947Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:33.547117Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:33.601911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 
2026-01-07T17:39:33.602670Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:33.604706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:33.604828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:33.604910Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:33.605376Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:33.605654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:33.605736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:33.677049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:33.720165Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:33.720330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:33.720427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:33.720471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:33.720505Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:33.720531Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:33.720669Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.720725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.721006Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:33.721131Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:33.721266Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:33.721310Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:33.721356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:33.721393Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:33.721443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:33.721480Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:33.721525Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:33.721649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender 
[1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.721713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.721763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:33.729418Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:33.729478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:33.729556Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:33.729692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:33.729775Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:33.729818Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:33.729859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:33.729890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:33.729921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:33.729943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:33.730220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:33.730265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:33.730307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:33.730351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:33.730425Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:33.730455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:33.730492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:33.730527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:33.730558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit 
WaitForPlan 2026-01-07T17:39:33.742592Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:33.742651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:33.742675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:33.742723Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:33.742780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:33.743239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.743286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.743321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:33.743425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:33.743448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:33.743548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:33.743577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:33.743603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:33.743629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:33.746797Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:33.746873Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:33.747101Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.747149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.747229Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:33.747276Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:33.747315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:33.747356Z node 1 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:33.747394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000001: ... ions 2026-01-07T17:39:34.583489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:39:34.583657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:240:2232], Recipient [1:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:34.583702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:34.583754Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:34.583788Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:34.583813Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:34.583852Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2026-01-07T17:39:34.583887Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2026-01-07T17:39:34.583911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.583928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2026-01-07T17:39:34.583945Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2026-01-07T17:39:34.583959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2026-01-07T17:39:34.584605Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2026-01-07T17:39:34.584655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.584683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2026-01-07T17:39:34.584702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-07T17:39:34.584718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2026-01-07T17:39:34.584741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.584754Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-07T17:39:34.584767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:34.584778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:39:34.584827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:12] is the new 
logically complete end at 9437184 2026-01-07T17:39:34.584852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:12] is the new logically incomplete end at 9437184 2026-01-07T17:39:34.584870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:12] at 9437184 2026-01-07T17:39:34.584892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.584904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:34.584990Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2026-01-07T17:39:34.585006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2026-01-07T17:39:34.585050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.585065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2026-01-07T17:39:34.585079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2026-01-07T17:39:34.585094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:39:34.585115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.585132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2026-01-07T17:39:34.585144Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2026-01-07T17:39:34.585171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2026-01-07T17:39:34.585206Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.585226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2026-01-07T17:39:34.585247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2026-01-07T17:39:34.585270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2026-01-07T17:39:34.585286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.585298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2026-01-07T17:39:34.585310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:39:34.585322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BlockFailPoint 2026-01-07T17:39:34.585335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 
2026-01-07T17:39:34.585346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:39:34.585361Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:39:34.585384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2026-01-07T17:39:34.585679Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2026-01-07T17:39:34.585715Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:34.585767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.585854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:39:34.585882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2026-01-07T17:39:34.585904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2026-01-07T17:39:34.586027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is DelayComplete 2026-01-07T17:39:34.586047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2026-01-07T17:39:34.586074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2026-01-07T17:39:34.586100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2026-01-07T17:39:34.586124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-07T17:39:34.586154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2026-01-07T17:39:34.586172Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:12] at 9437184 has finished 2026-01-07T17:39:34.586195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:34.586214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:34.586234Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:34.586267Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:34.598692Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2026-01-07T17:39:34.598760Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 
2026-01-07T17:39:34.598832Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:34.598879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2026-01-07T17:39:34.598945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:34.598999Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:39:34.599267Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2026-01-07T17:39:34.599296Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-07T17:39:34.599323Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:34.599349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2026-01-07T17:39:34.599380Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:34.599400Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin >> BasicUsage::ReadMirrored [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2026-01-07T17:39:30.674262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:30.674314Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:30.674752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:30.684336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:30.684691Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:30.684957Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:30.694600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:30.729262Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:30.729897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:30.731450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:30.731560Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:30.731625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:30.731946Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:30.732139Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:30.732207Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:30.799973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:30.820368Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:30.820514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:30.820595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:30.820656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:30.820687Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:30.820729Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:30.820852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.820903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.821210Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:30.821312Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:30.821431Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:30.821470Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:30.821513Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:30.821544Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:30.821589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:30.821623Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:30.821664Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:30.821753Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: 
StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.821803Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.821861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:30.827441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:30.827489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:30.827564Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:30.827688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:30.827756Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:30.827810Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:30.827864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:30.827897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:30.827920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:30.827946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:30.828166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:30.828198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:30.828243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:30.828264Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:30.828325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:30.828353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:30.828374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:30.828396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:30.828411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 
9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:30.839980Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:30.840047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:30.840072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:30.840124Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:30.840181Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:30.840686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.840740Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.840799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:30.840942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:30.840970Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:30.841098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:30.841136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:30.841168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:30.841200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:30.847597Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:30.847658Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:30.847820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.847846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.847883Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:30.847918Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:30.847942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:30.847970Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:30.847996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 17:39:35.602612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:39:35.602631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2026-01-07T17:39:35.602670Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:35.602684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-07T17:39:35.602715Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:35.602752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:35.602773Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:35.602841Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:35.602860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-07T17:39:35.602895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:35.602936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:35.602953Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:35.603052Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-07T17:39:35.603089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:35.603108Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-07T17:39:35.603172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-07T17:39:35.603186Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:35.603198Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-07T17:39:35.603312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:39:35.603330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:35.603346Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:39:35.603402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:240:2232], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:39:35.603426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:39:35.603448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-07T17:39:35.603489Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:39:35.603526Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-07T17:39:35.603570Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:39:35.603634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:39:35.603652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:35.603670Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:39:35.603709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:35.603727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:35.603760Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-07T17:39:35.603783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:35.603817Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-07T17:39:35.603837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-07T17:39:35.603875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:35.603901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-07T17:39:35.603930Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-07T17:39:35.603960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-07T17:39:35.603975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:35.603987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-07T17:39:35.603999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-07T17:39:35.604012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-07T17:39:35.604364Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-07T17:39:35.604406Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:35.604441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-07T17:39:35.604481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-07T17:39:35.604512Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-07T17:39:35.604529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:35.604686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-07T17:39:35.604702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-07T17:39:35.604719Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-07T17:39:35.604736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-07T17:39:35.604761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:39:35.604777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-07T17:39:35.604791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-07T17:39:35.604810Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:35.604825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:39:35.604854Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 
2026-01-07T17:39:35.604883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-07T17:39:35.617989Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:39:35.618051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:39:35.618102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:39:35.618165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:35.618218Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:39:35.618479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:39:35.618528Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:39:35.618591Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TxUsage::WriteToTopic_Demo_27_Query [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardTxOrder::ZigZag [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TxUsage::WriteToTopic_Demo_38_Table |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |87.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |87.5%| [LD] 
{RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system |87.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit |87.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:27.679342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:27.766228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:27.780053Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:27.780565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:27.780643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:28.043411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:28.043514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:28.115840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807565274913 != 1767807565274917 2026-01-07T17:39:28.125059Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:28.171674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:28.265450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:28.535524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:28.548720Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:28.651437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:28.685663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:28.686852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:28.687217Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:28.687549Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:28.732464Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:28.733438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:28.733595Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:28.735778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:28.735871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:28.735944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:28.736428Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:28.736581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:28.736709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:28.747492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:28.775092Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:28.775268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:28.775373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:28.775412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:28.775452Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:28.775479Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:28.775656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:28.775706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:28.775933Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:28.776007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:28.776103Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:28.776151Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:28.776204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:28.776240Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:28.776269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:28.776292Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:28.776324Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:28.776411Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:28.776443Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:28.776478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:28.776763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:28.776804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:28.776901Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:28.777182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:28.777272Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:28.777357Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:28.777428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:28.777469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:28.777510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:28.777557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:28.777916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:28.777957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:28.778013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:28.778063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:28.778130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:28.778166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:28.778208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:28.778244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:28.778265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:28.779351Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:28.779386Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:28.790208Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... amingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-07T17:39:37.263623Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-07T17:39:37.263695Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:39:37.264033Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1369:3097] 2026-01-07T17:39:37.264124Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1369:3097], channels: 1 2026-01-07T17:39:37.264235Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:39:37.264301Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1369:3097], trace_id# 2026-01-07T17:39:37.264371Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1365:3097] TxId: 281474976715672. 
Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1369:3097], trace_id# 2026-01-07T17:39:37.264438Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:39:37.265294Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1369:3097] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:39:37.265367Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1369:3097], trace_id# 2026-01-07T17:39:37.265430Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1369:3097], trace_id# 2026-01-07T17:39:37.265823Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1370:3097], Recipient [2:1286:3050]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-07T17:39:37.265949Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:37.266007Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v4000/18446744073709551615 2026-01-07T17:39:37.266055Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-07T17:39:37.266115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:37.266194Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:37.266231Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:37.266272Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:37.266311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:37.266361Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-07T17:39:37.266400Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:37.266429Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:37.266457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:37.266490Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:37.266592Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:39:37.266813Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1370:3097], 0} after executionsCount# 1 2026-01-07T17:39:37.266876Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1370:3097], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:37.266956Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1370:3097], 0} finished in read 2026-01-07T17:39:37.267021Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:37.267050Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:37.267077Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:37.267104Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:37.267150Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:37.267174Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:37.267202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-07T17:39:37.267243Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:37.268196Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1369:3097] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 828 Tasks { TaskId: 1 CpuTimeUs: 228 FinishTimeMs: 1767807577267 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 101 BuildCpuTimeUs: 127 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767807577267 CreateTimeMs: 1767807577264 UpdateTimeMs: 1767807577267 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:37.268293Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1369:3097] 2026-01-07T17:39:37.268411Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:37.268483Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1365:3097] TxId: 281474976715672. Ctx: { TraceId: 01kecrnv275gegnabn15wrh1sr, Database: , SessionId: ydb://session/3?node_id=2&id=M2JmZWI0MDEtMTc5MGRkMWItZDcyOWQ5MTUtMmE5ZWJlMmE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000828s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:39:37.268662Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1370:3097], Recipient [2:1286:3050]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:37.268715Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2026-01-07T17:39:30.359850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:30.359910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:30.360403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:30.373737Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:30.374126Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:30.374392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:30.385646Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:30.440706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:30.441380Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:30.443202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:30.443280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:30.443337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:30.443742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:30.443980Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:30.444043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:30.510409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:30.547606Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:30.547810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:30.547920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:30.547978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:30.548024Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
9437184, state: WaitScheme 2026-01-07T17:39:30.548062Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:30.548221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.548294Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.548639Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:30.548759Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:30.548911Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:30.548997Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:30.549049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:30.549091Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:30.549144Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:30.549186Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:30.549239Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:30.549338Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.549416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.549475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:30.557710Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:30.557802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:30.557921Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:30.558151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:30.558254Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:30.558317Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:30.558377Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:30.558414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:30.558452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:30.558487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:30.558810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:30.558858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:30.558909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:30.558964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:30.559062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:30.559104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:30.559149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:30.559195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:30.559230Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:30.571341Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:30.571418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:30.571459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:30.571514Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:30.571578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:30.572155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.572215Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:30.572261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:30.572423Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:30.572467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event 
TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:30.572626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:30.572679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:30.572720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:30.572756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:30.576587Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:30.576679Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:30.576909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.576979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:30.577033Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:30.577076Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:30.577116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:30.577154Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:30.577201Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-07T17:39:36.959188Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:39:36.959407Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:237:2229], Recipient [2:237:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.959440Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.959485Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:36.959514Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:36.959537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:36.959580Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2026-01-07T17:39:36.959607Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2026-01-07T17:39:36.959631Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.959652Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2026-01-07T17:39:36.959674Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2026-01-07T17:39:36.959704Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2026-01-07T17:39:36.960335Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2026-01-07T17:39:36.960388Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.960419Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2026-01-07T17:39:36.960441Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-07T17:39:36.960464Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2026-01-07T17:39:36.960496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.960515Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-07T17:39:36.960535Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:36.960556Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:39:36.960618Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2026-01-07T17:39:36.960645Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2026-01-07T17:39:36.960684Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2026-01-07T17:39:36.960717Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.960748Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:36.960773Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2026-01-07T17:39:36.960797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2026-01-07T17:39:36.960838Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.960858Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2026-01-07T17:39:36.960876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2026-01-07T17:39:36.960896Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:39:36.960915Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.960968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2026-01-07T17:39:36.960990Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2026-01-07T17:39:36.961010Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2026-01-07T17:39:36.961052Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.961073Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2026-01-07T17:39:36.961092Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2026-01-07T17:39:36.961111Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2026-01-07T17:39:36.961132Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.961151Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2026-01-07T17:39:36.961169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:39:36.961188Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2026-01-07T17:39:36.961211Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.961257Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:39:36.961289Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:39:36.961322Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2026-01-07T17:39:36.961686Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2026-01-07T17:39:36.961733Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:36.961781Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.961805Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:39:36.961825Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2026-01-07T17:39:36.961846Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2026-01-07T17:39:36.962012Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2026-01-07T17:39:36.962040Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2026-01-07T17:39:36.962080Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2026-01-07T17:39:36.962113Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2026-01-07T17:39:36.962181Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:36.962210Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2026-01-07T17:39:36.962233Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2026-01-07T17:39:36.962273Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:36.962299Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:36.962324Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:36.962348Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:36.974911Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-07T17:39:36.974963Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-07T17:39:36.975010Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:36.975042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-07T17:39:36.975105Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:36.975140Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:36.975860Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-07T17:39:36.975890Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-07T17:39:36.975916Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:36.975934Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-07T17:39:36.975974Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-07T17:39:36.975998Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2026-01-07T17:39:03.909819Z :PropagateSessionClosed INFO: Random seed for debugging is 1767807543909783 2026-01-07T17:39:04.190375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675587694539160:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.190441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.219513Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675590331562433:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.219588Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.221720Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.229174Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.394825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.393028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.439877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.439987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:39:04.441471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.441561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.455916Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:04.456081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.458609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.538052Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:04.605076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001a62/r3tmp/yandexMg4Ptx.tmp 2026-01-07T17:39:04.605121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001a62/r3tmp/yandexMg4Ptx.tmp 2026-01-07T17:39:04.605310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001a62/r3tmp/yandexMg4Ptx.tmp 2026-01-07T17:39:04.605421Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:04.644524Z INFO: TTestServer started on Port 23238 GrpcPort 12097 2026-01-07T17:39:04.644893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:04.656014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:12097 2026-01-07T17:39:04.825784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:05.198617Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:05.228234Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:07.429199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675600579442328:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.429376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.429739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675600579442340:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.429786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675600579442341:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.429828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.434356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:07.436662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675603216464635:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.436660Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675603216464646:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.436732Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.437057Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675603216464652:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.437110Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.508658Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675603216464654:2132] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:39:07.514174Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675600579442344:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:39:07.575048Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675600579442440:2971] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:07.777748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:07.781722Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675600579442457:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.782936Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675603216464691:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.783474Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MzM4YWI2MjQtZjFhYzMzNC0xN2FmMWZhNS05NzlhMDA5NQ==, ActorId: [2:7592675603216464621:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmy18628px1h680rnetqm, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:07.784331Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2UxNjI4NWItYzZkZDBlYy1mODYyOWQ0MS04YjNjMTlkOQ==, ActorId: [1:7592675600579442311:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmy126xctmtcnwq2j4zsw ... UEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.206150Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.206194Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_12146822806281839738_v1 grpc read done: success# 0, data# { } 2026-01-07T17:39:35.206168Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.206213Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_12146822806281839738_v1 grpc read failed 2026-01-07T17:39:35.206189Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.206204Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.206232Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_12146822806281839738_v1 grpc closed 2026-01-07T17:39:35.206285Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_12146822806281839738_v1 is DEAD 2026-01-07T17:39:35.207876Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7592675716661331591:2510] disconnected. 
2026-01-07T17:39:35.207904Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7592675716661331591:2510] disconnected; active server actors: 1 2026-01-07T17:39:35.207927Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7592675716661331591:2510] client user disconnected session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.207986Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675716661331592:2510] disconnected. 2026-01-07T17:39:35.208004Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675716661331592:2510] disconnected; active server actors: 1 2026-01-07T17:39:35.208016Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675716661331592:2510] client user disconnected session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.207988Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208030Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592675716661331601:2515] destroyed 2026-01-07T17:39:35.208070Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7592675716661331593:2510] disconnected. 2026-01-07T17:39:35.208062Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208084Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [3:7592675716661331599:2513] destroyed 2026-01-07T17:39:35.208092Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7592675716661331593:2510] disconnected; active server actors: 1 2026-01-07T17:39:35.208106Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208105Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7592675716661331593:2510] client user disconnected session shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208123Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [3:7592675716661331604:2516] destroyed 2026-01-07T17:39:35.208164Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208187Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.208202Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_12146822806281839738_v1 2026-01-07T17:39:35.231469Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.231503Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.231515Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.231531Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.231542Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.249448Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.249477Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.249488Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.249500Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.249510Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.268870Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.268897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.268907Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.268919Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.268952Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.331844Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.331889Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.331903Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.331922Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.331934Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.349794Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.349828Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.349847Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.349866Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T17:39:35.349879Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.369247Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.369281Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.369294Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.369312Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.369324Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.432213Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.432249Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.432265Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.432285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.432312Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.450087Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.450134Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.450144Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.450156Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.450164Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:39:35.469616Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:35.469660Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.469686Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:35.469713Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:35.469730Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |87.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> TInterconnectTest::TestNotifyUndelivered |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> TestProtocols::TestResolveProtocol >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |87.7%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TInterconnectTest::OldFormat >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin >> TActorTracker::Basic [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2026-01-07T17:35:36.908767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674696076997745:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:36.908955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:36.963932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:37.281440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:37.281556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:37.309205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:37.328599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:37.347770Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592674696076997706:2081] 1767807336907120 != 1767807336907123 2026-01-07T17:35:37.356672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:37.540256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:37.540277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:37.540285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:37.540388Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:37.602107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:37.612913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:37.627580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:35:37.632017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.668745Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7592674700371965843:2296] 2026-01-07T17:35:37.669051Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:37.681482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:37.681563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:37.683231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:37.683264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:37.683298Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:37.683630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:37.683681Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:37.683714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7592674700371965857:2296] in generation 1 2026-01-07T17:35:37.689723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:37.744645Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:37.744824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:37.744904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7592674700371965859:2297] 2026-01-07T17:35:37.744916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:37.761513Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:37.761568Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.761792Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:37.761863Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:37.761902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592674700371965837:2501], serverId# [1:7592674700371965841:2503], sessionId# [0:0:0] 2026-01-07T17:35:37.762007Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.762019Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:37.762034Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:37.762052Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.762081Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:37.762481Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 
72057594046644480 seqNo 2:1 2026-01-07T17:35:37.762586Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T17:35:37.764298Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:37.767319Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:37.767399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:37.769999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592674700371965873:2520], serverId# [1:7592674700371965875:2522], sessionId# [0:0:0] 2026-01-07T17:35:37.774017Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:37.779603Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767807337814 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807337814 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:37.779655Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.779845Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.779869Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:37.779909Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807337814:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:37.780208Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767807337814:281474976710657 keys extracted: 0 2026-01-07T17:35:37.780376Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:37.780472Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.780514Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:37.783434Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:37.783934Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:37.786550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767807337813 2026-01-07T17:35:37.786578Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.786631Z 
node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767807337814} 2026-01-07T17:35:37.786662Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.786691Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.786709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:37.786722Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:37.786779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807337814 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7592674700371965378:2157], exec latency: 3 ms, propose latency: 6 ms 2026-01-07T17:35:37.786811Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-07T17:35:37.786840Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 720 ... artition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:38.203624Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:38.203657Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:39:38.225059Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:38.225146Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:38.225180Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:38.225214Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:38.225246Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:39:38.226305Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2026-01-07T17:39:38.226448Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:38.226616Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:42: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2026-01-07T17:39:38.226894Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-07T17:39:38.238779Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:78: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2026-01-07T17:39:38.238869Z node 30 :TX_DATASHARD DEBUG: 
datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 } 2026-01-07T17:39:38.238926Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:38.238991Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3842: Waiting for PlanStep# 12000 from mediator time cast 2026-01-07T17:39:38.239118Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][30:903:2775] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-07T17:39:38.239360Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][30:1161:2951] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-07T17:39:38.239541Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2026-01-07T17:39:38.239741Z node 30 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [30:1161:2951], at tablet# 72075186224037888 2026-01-07T17:39:38.239805Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2026-01-07T17:39:38.239922Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][30:1161:2951] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-07T17:39:38.240030Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1244:2951] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-07T17:39:38.240321Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:39:38.240365Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:39:38.240458Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2026-01-07T17:39:38.240621Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:39:38.240646Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:39:38.240706Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 5 size 26 offset: -1 
2026-01-07T17:39:38.240811Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:39:38.240870Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:38.240898Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:39:38.240954Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:39:38.240984Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:38.241019Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:39:38.241162Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-07T17:39:38.241222Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:39:38.241277Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:39:38.241312Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:39:38.241345Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-07T17:39:38.241444Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-07T17:39:38.251210Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 107 count 1 nextOffset 5 batches 1 2026-01-07T17:39:38.251732Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 4 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000? size 93 WTime 8950 2026-01-07T17:39:38.252112Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:39:38.252250Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 93 2026-01-07T17:39:38.253498Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 1 size 93 actorID [30:1123:2903] 2026-01-07T17:39:38.253635Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' size 93 2026-01-07T17:39:38.253693Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-07T17:39:38.254576Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:39:38.254634Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:39:38.254813Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2026-01-07T17:39:38.256688Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2026-01-07T17:39:38.256806Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-07T17:39:38.257008Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:07.000000Z 2026-01-07T17:39:38.257090Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-07T17:39:38.257246Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2026-01-07T17:39:38.257517Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-07T17:39:38.257624Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:38.257791Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-07T17:39:38.258154Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2026-01-07T17:39:38.258315Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-07T17:39:38.259113Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |87.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [GOOD] >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |87.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2026-01-07T17:35:37.058313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592674699608082915:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:35:37.058362Z 
node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:35:37.319550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:35:37.356830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:37.357086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:37.388074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:37.454621Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:37.531826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:37.531850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:37.531864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:37.531969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:37.567681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:37.588681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:37.619515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:35:37.621709Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7592674699608083615:2296] 2026-01-07T17:35:37.621957Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:37.637282Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:37.637373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:37.639263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:37.639316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:37.639349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:37.639709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:37.639758Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:37.639786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7592674699608083629:2296] in generation 1 2026-01-07T17:35:37.641638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:37.685575Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:37.685749Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:37.685810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7592674699608083631:2297] 2026-01-07T17:35:37.685825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:37.685853Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:37.685865Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.686017Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:37.686095Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:37.686150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592674699608083588:2493], serverId# [1:7592674699608083592:2496], sessionId# [0:0:0] 2026-01-07T17:35:37.686254Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.686278Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:37.686297Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:37.686312Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.686327Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:37.686612Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:37.686701Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:37.688693Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:37.691446Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:37.691529Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:37.693733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server 
connected at leader tablet# 72075186224037888, clientId# [1:7592674699608083646:2517], serverId# [1:7592674699608083647:2518], sessionId# [0:0:0] 2026-01-07T17:35:37.698490Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1767807337737 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807337737 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:37.698528Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.698673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.698690Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:37.698737Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807337737:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:37.699375Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767807337737:281474976715657 keys extracted: 0 2026-01-07T17:35:37.699519Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:37.699628Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:37.699746Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:37.699796Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:37.703402Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:37.703925Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:37.705599Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767807337737} 2026-01-07T17:35:37.705642Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.707128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767807337744 2026-01-07T17:35:37.707146Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.707174Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:37.707209Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:37.707229Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:37.707268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807337737 : 
281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7592674699608083123:2141], exec latency: 4 ms, propose latency: 7 ms 2026-01-07T17:35:37.707305Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:37.707343Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:37.707436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1767807337744 2026-01-07T17:35:37.708812Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7592674699608083631:2297][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2026-01-07T17:35:37.713787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17: ... topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:39:39.140212Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2026-01-07T17:39:39.140374Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:39:39.140503Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:39.140620Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:39:39.140727Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:39:39.140814Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:39.140945Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:39:39.141108Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-07T17:39:39.141260Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:39:39.141360Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:39:39.141466Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:39:39.141592Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-07T17:39:39.141826Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-07T17:39:39.142874Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: 
[72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2026-01-07T17:39:39.144012Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2026-01-07T17:39:39.144519Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:39:39.144857Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2026-01-07T17:39:39.146269Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [30:1011:2835] 2026-01-07T17:39:39.146497Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2026-01-07T17:39:39.146606Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:39:39.157185Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:492: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:39:39.157422Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:39:39.157594Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-07T17:39:39.157795Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2026-01-07T17:39:39.158276Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:39.158615Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:39.158697Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:39.158784Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:39.158867Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:39:39.158975Z node 30 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:39:39.159128Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2026-01-07T17:39:39.159505Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1112:2877] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2026-01-07T17:39:39.159693Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][30:1045:2877] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2026-01-07T17:39:39.159937Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2026-01-07T17:39:39.160027Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2026-01-07T17:39:39.160774Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... 
checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-07T17:39:39.269897Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:39:39.270047Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-07T17:39:39.270403Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2026-01-07T17:39:39.272432Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 10 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2026-01-07T17:39:39.272570Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 10. Send blob request. 2026-01-07T17:39:39.272774Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 5 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-07T17:39:39.272866Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 2 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-07T17:39:39.272905Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-07T17:39:39.272960Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-07T17:39:39.273058Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-07T17:39:39.273112Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-07T17:39:39.273234Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 10. All 6 blobs are from cache. 2026-01-07T17:39:39.273426Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273511Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273555Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273596Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273635Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273674Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:39:39.273839Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 6 blobs 2026-01-07T17:39:39.274354Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-07T17:39:39.274520Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2026-01-07T17:39:39.274599Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-07T17:39:39.274663Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-07T17:39:39.274731Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-07T17:39:39.274796Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-07T17:39:39.275085Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> KqpUserConstraint::KqpReadNull+UploadNull >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2026-01-07T17:39:41.833995Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [4:22:2057] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-07T17:39:42.321607Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:20:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-07T17:39:42.798218Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [8:22:2057] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-07T17:39:42.800588Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [7:20:2058] [node 8] 
ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2026-01-07T17:39:42.269528Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 (null) -> PendingActivation 2026-01-07T17:39:42.269602Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2026-01-07T17:39:42.270298Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @207 (null) -> PendingActivation 2026-01-07T17:39:42.270339Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:10:2048] [node 5] ICP01 ready to work 2026-01-07T17:39:42.270586Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-07T17:39:42.271541Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:22819 2026-01-07T17:39:42.271668Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-07T17:39:42.272002Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:747: Handshake [5:21:2058] [node 6] ICH01 starting outgoing handshake 2026-01-07T17:39:42.272139Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-07T17:39:42.272720Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:751: Handshake [5:21:2058] [node 6] ICH05 connected to peer 2026-01-07T17:39:42.273129Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:48178 2026-01-07T17:39:42.273494Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2026-01-07T17:39:42.274693Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 226451 ProgramStartTime: 3958556288298 Serial: 132868728 ReceiverNodeId: 6 SenderActorId: "[5:132868728:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 226451" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 226451" AcceptUUID: "Cluster for process with id: 226451" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\235\272\337\217F\210C\221-;d\313D\364vcf7c\347\006\310t/\037\332N\373\344\027\343\364" RequestXxhash: true RequestXdcShuffle: true 2026-01-07T17:39:42.275305Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 226451 ProgramStartTime: 3958556288298 Serial: 132868728 ReceiverNodeId: 6 SenderActorId: "[5:132868728:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 226451" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 226451" AcceptUUID: "Cluster for process with id: 226451" } RequestModernFrame: 
true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\235\272\337\217F\210C\221-;d\313D\364vcf7c\347\006\310t/\037\332N\373\344\027\343\364" RequestXxhash: true RequestXdcShuffle: true 2026-01-07T17:39:42.275396Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [6:23:2058] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-07T17:39:42.275801Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-07T17:39:42.277206Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [6:10:2048] [node 5] ICP02 configured for host ::1:28599 2026-01-07T17:39:42.277278Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:263: Proxy [6:10:2048] [node 5] ICP17 incoming handshake (actor [6:23:2058]) 2026-01-07T17:39:42.277349Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-07T17:39:42.277412Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2026-01-07T17:39:42.277475Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:220: Proxy [6:10:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2026-01-07T17:39:42.277524Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @221 PendingConnection -> PendingConnection 2026-01-07T17:39:42.278037Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 226451 ProgramStartTime: 3958567003716 Serial: 2895900480 SenderActorId: "[6:2895900480:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 226451" AcceptUUID: "Cluster for process with id: 226451" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-07T17:39:42.278745Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 226451 ProgramStartTime: 3958567003716 Serial: 2895900480 SenderActorId: "[6:2895900480:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 226451" AcceptUUID: "Cluster for process with id: 226451" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-07T17:39:42.278826Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:21:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-07T17:39:42.278978Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-07T17:39:42.279820Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\235\272\337\217F\210C\221-;d\313D\364vcf7c\347\006\310t/\037\332N\373\344\027\343\364" 2026-01-07T17:39:42.279942Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:21:2058] [node 6] ICH04 handshake succeeded 2026-01-07T17:39:42.280277Z node 5 :INTERCONNECT 
INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-07T17:39:42.280335Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:21:2058] poison: false 2026-01-07T17:39:42.280388Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 PendingConnection -> StateWork 2026-01-07T17:39:42.280626Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:25:2048] 2026-01-07T17:39:42.280696Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:21:2058] self: [5:132868728:0] peer: [6:2895900480:0] socket: 24 qp: -1 2026-01-07T17:39:42.280765Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-07T17:39:42.280860Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-07T17:39:42.280946Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-07T17:39:42.280993Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-07T17:39:42.281053Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-07T17:39:42.281128Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:241: Session [5:25:2048] [node 6] ICS04 subscribe for session state for [5:19:2057] 2026-01-07T17:39:42.281225Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:48188 2026-01-07T17:39:42.281383Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:26:2048] [node 6] ICIS01 InputSession created 2026-01-07T17:39:42.281808Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:27:2059] [node 0] ICH02 starting incoming handshake 2026-01-07T17:39:42.282769Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.282888Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.283211Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:23:2058] [node 5] ICH04 handshake succeeded 2026-01-07T17:39:42.283449Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-07T17:39:42.283504Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:23:2058] poison: false 2026-01-07T17:39:42.283565Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 PendingConnection -> StateWork 2026-01-07T17:39:42.283688Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [6:10:2048] [node 5] ICP22 created new session: [6:28:2048] 2026-01-07T17:39:42.283736Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:23:2058] self: [6:2895900480:0] 
peer: [5:132868728:0] socket: 25 qp: -1 2026-01-07T17:39:42.283777Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-07T17:39:42.283858Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-07T17:39:42.283919Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-07T17:39:42.283960Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-07T17:39:42.283998Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-07T17:39:42.284049Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.284110Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.284223Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:29:2048] [node 5] ICIS01 InputSession created 2026-01-07T17:39:42.284268Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.284298Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.284336Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:29:2048] [node 5] ICIS02 ReceiveData called 2026-01-07T17:39:42.284414Z node 6 :I ... 
cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-07T17:39:42.294048Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:31:2059] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\361\246\212\277?#\352\201\247%*\010\000Z\364\001\315Z\3635\375\264\266\004T\030\320@\307\331\313R" 2026-01-07T17:39:42.294139Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:31:2059] [node 6] ICH04 handshake succeeded 2026-01-07T17:39:42.294406Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-07T17:39:42.294472Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:34:2060] poison: true 2026-01-07T17:39:42.294540Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:31:2059] poison: false 2026-01-07T17:39:42.294591Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 StateWork -> StateWork 2026-01-07T17:39:42.294656Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:31:2059] self: [5:132868728:0] peer: [6:2895900480:0] socket: 29 qp: -1 2026-01-07T17:39:42.294721Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-07T17:39:42.294804Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-07T17:39:42.294861Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.294907Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2026-01-07T17:39:42.294974Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:961: Session [5:25:2048] [node 6] ICS22 outgoing packet Serial# 2 Confirm# 1 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2026-01-07T17:39:42.295057Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.295131Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:48202 2026-01-07T17:39:42.295547Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:38:2062] [node 0] ICH02 starting incoming handshake 2026-01-07T17:39:42.296228Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:37:2048] [node 6] ICIS01 InputSession created 2026-01-07T17:39:42.297613Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:409: Proxy [6:10:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:28599) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:35:2061] held: no 2026-01-07T17:39:42.297681Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [6:10:2048] [node 5] ICP052 dropped outgoing handshake: [6:30:2060] poison: false 2026-01-07T17:39:42.297740Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:432: Proxy [6:10:2048] [node 5] ICP28 other handshake is still going on 2026-01-07T17:39:42.297797Z node 5 
:INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.297878Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.298208Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:35:2061] [node 5] ICH04 handshake succeeded 2026-01-07T17:39:42.298417Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-07T17:39:42.298484Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:35:2061] poison: false 2026-01-07T17:39:42.298534Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 StateWork -> StateWork 2026-01-07T17:39:42.298595Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:35:2061] self: [6:2895900480:0] peer: [5:132868728:0] socket: 31 qp: -1 2026-01-07T17:39:42.298642Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-07T17:39:42.298726Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-07T17:39:42.298781Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.298825Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2026-01-07T17:39:42.298904Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [6:28:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-07T17:39:42.298974Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2026-01-07T17:39:42.299010Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.299060Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.299116Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.299226Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:39:2048] [node 5] ICIS01 InputSession created 2026-01-07T17:39:42.299368Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-07T17:39:42.299427Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2026-01-07T17:39:42.299522Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.299552Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 
num# 1 err# 2026-01-07T17:39:42.299590Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.299634Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.299830Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.299882Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.299925Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-07T17:39:42.299978Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.300079Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-07T17:39:42.300139Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.300190Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.300222Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-07T17:39:42.300255Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.300278Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.300322Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:164: Session [6:28:2048] [node 5] ICS02 send event from: [6:20:2057] to: [5:19:2057] 2026-01-07T17:39:42.300407Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:961: Session [6:28:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2026-01-07T17:39:42.300512Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.300570Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-07T17:39:42.300609Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2026-01-07T17:39:42.300716Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-07T17:39:42.300772Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.300814Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-07T17:39:42.300867Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-07T17:39:42.300941Z node 5 :INTERCONNECT_SESSION DEBUG: 
interconnect_channel.cpp:62: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2026-01-07T17:39:42.301044Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [5:25:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-07T17:39:42.301097Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-07T17:39:42.301190Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:105: Session [5:25:2048] [node 6] ICS01 socket: 29 reason# 2026-01-07T17:39:42.301239Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:543: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:25:2048] VirtualId# [5:132868728:0] 2026-01-07T17:39:42.301291Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 StateWork -> PendingActivation 2026-01-07T17:39:42.301335Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:571: Session [5:25:2048] [node 6] ICS25 shutdown socket, reason# 2026-01-07T17:39:42.301482Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:461: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:32.730434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:32.837542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:32.857193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:32.857681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:32.857735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:33.097589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:33.097725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:33.165533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807570149604 != 1767807570149608 2026-01-07T17:39:33.176081Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:33.219424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:33.316547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:33.589101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:33.608273Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:33.716362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:33.762486Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:33.763277Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:33.763525Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:33.763836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:33.799449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:33.800006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:33.800102Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:33.801905Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:33.802001Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:33.802063Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:33.802469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:33.802633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:33.802727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:33.813600Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:33.850133Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:33.850333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:33.850456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:33.850512Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:33.850568Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:33.850605Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:33.850871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.850969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:33.851338Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:33.851445Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:33.851596Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:33.851641Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:33.851694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:33.851739Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:33.851788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:33.851819Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:33.851864Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:33.851992Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.852044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:33.852094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:33.852523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:33.852609Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:33.852731Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:33.853024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:33.853123Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:33.853211Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:33.853279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:33.853315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:33.853371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:33.853408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:33.853731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:33.853784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:33.853835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:33.853871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:33.853935Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:33.853969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:33.854007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:33.854039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:33.854063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:33.855586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:33.855632Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:33.866450Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... mEtMWI5ODg4NDEtZTE1MzRmYjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-01-07T17:39:42.155739Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:39:42.157350Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:39:42.157731Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1087:2905] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 172230 Tasks { TaskId: 1 CpuTimeUs: 170096 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 14 BuildCpuTimeUs: 170082 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 2 CreateTimeMs: 1767807581527 CurrentWaitInputTimeUs: 127181 UpdateTimeMs: 1767807582155 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:42.157794Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1087:2905] 2026-01-07T17:39:42.157873Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-07T17:39:42.157949Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1088:2906] TaskId# 2 trace_id# 2026-01-07T17:39:42.158051Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1089:2907] TaskId# 3 trace_id# 2026-01-07T17:39:42.158138Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:42.158264Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 0.550000s 2026-01-07T17:39:42.158316Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1088:2906], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrnyxgdcg18091f8mt5n8v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-01-07T17:39:42.158393Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1088:2906], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrnyxgdcg18091f8mt5n8v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1082:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-07T17:39:42.158537Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:39:42.159905Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:39:42.160038Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1089:2907], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kecrnyxgdcg18091f8mt5n8v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-07T17:39:42.160100Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1089:2907], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kecrnyxgdcg18091f8mt5n8v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1082:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-07T17:39:42.160205Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:39:42.162754Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:39:42.162950Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1090:2908];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1087:2905];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:39:42.163041Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1090:2908];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:39:42.163493Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, ActorId: [2:1055:2878], ActorState: ExecuteState, LegacyTraceId: 01kecrnyxgdcg18091f8mt5n8v, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-07T17:39:42.163896Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:39:42.163943Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-07T17:39:42.164288Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-07T17:39:42.164350Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-07T17:39:42.164379Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-07T17:39:42.164445Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1123:2939] SnapshotReq marker# P0 2026-01-07T17:39:42.164911Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1126:2939] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-07T17:39:42.165052Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-07T17:39:42.165126Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1126:2939] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-07T17:39:42.165254Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:39:42.165318Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1122:2878] TxId: 281474976715664. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:39:42.165410Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1122:2878] TxId: 281474976715664. Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:42.165479Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1122:2878] TxId: 281474976715664. 
Ctx: { TraceId: 01kecrnyxgdcg18091f8mt5n8v, Database: , SessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:39:42.165700Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1055:2878] 2026-01-07T17:39:42.165786Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1123:2939] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-07T17:39:42.166066Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1123:2939], Recipient [2:887:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-07T17:39:42.166722Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=OWU0OGRiOWQtYzUxNWRhYmEtMWI5ODg4NDEtZTE1MzRmYjc=, workerId: [2:1055:2878], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 184 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query [GOOD] |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:34.975435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:35.064739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:35.078966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:35.079336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:35.079403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:35.325585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:35.325685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:35.383762Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807572656916 != 1767807572656920 2026-01-07T17:39:35.391339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:35.432888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:35.526812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:35.807274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:35.820480Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:35.923139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:35.958108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:35.959118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:35.959397Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2768] 2026-01-07T17:39:35.959649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:35.968177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:36.002021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:891:2768]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:36.002360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:36.002592Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-07T17:39:36.002774Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:36.010098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:36.010729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:36.010842Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:36.012305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:36.012389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:36.012447Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:36.012816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:36.012974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:36.013059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-07T17:39:36.013477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:36.013553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:36.014715Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:39:36.014773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:39:36.014820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:39:36.015066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:36.015141Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:36.015188Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-07T17:39:36.025934Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:36.051020Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:36.051168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:36.051259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-07T17:39:36.051300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:36.051334Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:36.051362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:36.051435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:36.051459Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:39:36.051515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:36.051541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-07T17:39:36.051553Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:39:36.051565Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:39:36.051577Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:39:36.051678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2768], Recipient [1:891:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.051733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.051784Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.051798Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:36.052053Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:36.052108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:36.052133Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:39:36.052162Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:39:36.052253Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:36.052277Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:36.052338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:36.052363Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:36.052387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:36.052406Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:36.052432Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:36.052462Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:39:36.052476Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:36.052495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2026-01-07T17:39:36.052522Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-07T17:39:36.052535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 202 ... riteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:39:37.047205Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=1 2026-01-07T17:39:37.047263Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:39:37.047335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:37.047357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:39:37.047389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:39:37.047418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:37.047490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:37.047524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:39:37.047567Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:37.047597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:37.047630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:39:37.047650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:37.047683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T17:39:37.061535Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:37.061604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:37.061694Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:39:37.061787Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-01-07T17:39:37.062304Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [1:1163:2945] TxId: 281474976715662. Ctx: { TraceId: 01kecrntwt532j040fv9tqm3zc, Database: , SessionId: ydb://session/3?node_id=1&id=Yzg0NDY2NjQtODlhYjNiMjUtNmMwNDM5MmMtZDNiYTQwM2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:39:37.062368Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [1:1163:2945] TxId: 281474976715662. Ctx: { TraceId: 01kecrntwt532j040fv9tqm3zc, Database: , SessionId: ydb://session/3?node_id=1&id=Yzg0NDY2NjQtODlhYjNiMjUtNmMwNDM5MmMtZDNiYTQwM2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ... waiting for SysViewsRoster update finished 2026-01-07T17:39:40.311919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:40.344865Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:40.349768Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:40.350146Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:40.350318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:40.594479Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:40.594583Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:40.617650Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807577513169 != 1767807577513173 2026-01-07T17:39:40.621627Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:40.666503Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:40.733663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:41.005618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:41.019087Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:41.122164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.379474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.595613Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1026:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.595708Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1036:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.595759Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.596596Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1042:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.596727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.599780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:41.783426Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:41.819011Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1099:2915] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for readsets 2026-01-07T17:39:42.583203Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1181:2954], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1159:2954]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-07T17:39:42.592653Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:1173:2954], SessionActorId: [2:1159:2954], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:1159:2954]. 2026-01-07T17:39:42.592917Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976715664, task: 1, CA Id [2:1207:2992]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-07T17:39:42.593148Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YTMyMzY5YzUtMzgzNjgzNS1hYTE5ZDU4ZC1hZGIzMDMzOQ==, ActorId: [2:1159:2954], ActorState: ExecuteState, LegacyTraceId: 01kecrnzrx47x8v00a4h1cc2q5, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:1174:2954] from: [2:1173:2954] trace_id# 2026-01-07T17:39:42.593858Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:1174:2954] TxId: 281474976715663. Ctx: { TraceId: 01kecrnzrx47x8v00a4h1cc2q5, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YTMyMzY5YzUtMzgzNjgzNS1hYTE5ZDU4ZC1hZGIzMDMzOQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-07T17:39:42.594343Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YTMyMzY5YzUtMzgzNjgzNS1hYTE5ZDU4ZC1hZGIzMDMzOQ==, ActorId: [2:1159:2954], ActorState: ExecuteState, LegacyTraceId: 01kecrnzrx47x8v00a4h1cc2q5, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> TestProtocols::TestHTTPRequest [GOOD] |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:33.809454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:33.903707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:33.924209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:33.924743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:33.924808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:34.172666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:34.172784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:34.243048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807571331860 != 1767807571331864 2026-01-07T17:39:34.253786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:34.298584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:34.394053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:34.656752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:34.669848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:34.772628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:35.041875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:35.250485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.250576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.250645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.251353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.251486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.254415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:35.397197Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:35.459202Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-07T17:39:37.068227Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzljNjFmODQtNmI5ZDEyYTYtNzZhY2Y0MjUtM2RkMGU1YzQ=, ActorId: [1:1165:2960], ActorState: ExecuteState, LegacyTraceId: 01kecrntxgacf1y6p5jryeg0pc, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-07T17:39:40.494974Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:40.501428Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:40.505749Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:40.506069Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:40.506272Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:40.766642Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:40.766744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:40.788959Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807577512655 != 1767807577512659 2026-01-07T17:39:40.792831Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:40.838262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:40.907452Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:41.230377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:41.243498Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:41.345920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.571561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.786971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.787070Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.787406Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.788048Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.788158Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.791405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:41.975793Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:42.010514Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1097:2913] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-07T17:39:43.427251Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ODE1YTZlOGQtNTk2NzMyYzAtM2Y1YTFkOTAtZTZkNTc2NmE=, ActorId: [2:1160:2963], ActorState: ExecuteState, LegacyTraceId: 01kecrp13t9jk10mky5pma8g7k, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardWrite::UpsertWithDefaults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2026-01-07T17:39:25.762781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:25.762833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:25.763264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:25.775190Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:25.775514Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:25.775759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:25.785766Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:25.828972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:25.829593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:25.831442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:25.831510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:25.831562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:25.831900Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:25.832111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:25.832165Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:25.904295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:25.935336Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state 
WaitScheme tabletId 9437184 2026-01-07T17:39:25.935476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:25.935546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:25.935583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:25.935609Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:25.935636Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:25.935747Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:25.935791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:25.936011Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:25.936078Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:25.936168Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:25.936195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:25.936226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:25.936248Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:25.936280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:25.936302Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:25.936334Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:25.936389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:25.936416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:25.936450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:25.942140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:25.942188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 
2026-01-07T17:39:25.942258Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:25.942383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:25.942435Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:25.942474Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:25.942508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:25.942535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:25.942583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:25.942615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:25.942844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:25.942872Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:25.942909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:25.942948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:25.943019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:25.943046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:25.943074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:25.943105Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:25.943131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:25.954992Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:25.955063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:25.955086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:25.955123Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:25.955170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:25.955513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:25.955553Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:25.955587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:25.955698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:25.955725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:25.955827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:25.955853Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:25.955879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:25.955905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:25.958730Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:25.958782Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:25.958931Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:25.958956Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:25.958995Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:25.959025Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:25.959046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:25.959075Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:25.959107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-07T17:39:44.422235Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:39:44.422453Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [6:239:2231], Recipient [6:239:2231]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:44.422496Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:44.422537Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:44.422568Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:44.422622Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:44.422654Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2026-01-07T17:39:44.422680Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2026-01-07T17:39:44.422706Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.422726Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2026-01-07T17:39:44.422747Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2026-01-07T17:39:44.422768Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2026-01-07T17:39:44.423527Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2026-01-07T17:39:44.423588Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.423615Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2026-01-07T17:39:44.423657Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-07T17:39:44.423688Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2026-01-07T17:39:44.423728Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.423752Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-07T17:39:44.423778Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:44.423804Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:39:44.423852Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2026-01-07T17:39:44.423884Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2026-01-07T17:39:44.423916Z 
node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2026-01-07T17:39:44.423962Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.423985Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:44.424011Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2026-01-07T17:39:44.424040Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2026-01-07T17:39:44.424089Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.424126Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2026-01-07T17:39:44.424154Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2026-01-07T17:39:44.424197Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:39:44.424227Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.424249Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2026-01-07T17:39:44.424272Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2026-01-07T17:39:44.424295Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2026-01-07T17:39:44.424327Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.424348Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2026-01-07T17:39:44.424371Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2026-01-07T17:39:44.424396Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2026-01-07T17:39:44.424419Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.424442Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2026-01-07T17:39:44.424478Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:39:44.424505Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2026-01-07T17:39:44.424543Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.424566Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:39:44.424591Z node 6 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:39:44.424615Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2026-01-07T17:39:44.424991Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2026-01-07T17:39:44.425064Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:39:44.425122Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.425148Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:39:44.425171Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2026-01-07T17:39:44.425194Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2026-01-07T17:39:44.425351Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2026-01-07T17:39:44.425372Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2026-01-07T17:39:44.425396Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2026-01-07T17:39:44.425421Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2026-01-07T17:39:44.425447Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-07T17:39:44.425465Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2026-01-07T17:39:44.425484Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2026-01-07T17:39:44.425510Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:44.425531Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:44.425561Z node 6 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:44.425589Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:44.439199Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-07T17:39:44.439285Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-07T17:39:44.439385Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:44.439447Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-07T17:39:44.439530Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:44.439592Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:44.439965Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-07T17:39:44.440012Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-07T17:39:44.440077Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:39:44.440108Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-07T17:39:44.440155Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:39:44.440190Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous >> TFlatTest::SplitEmptyToMany >> TFlatTest::RejectByPerShardReadSize >> TLocksTest::SetLockFail >> TLocksFatTest::RangeSetBreak >> KqpWorkloadServiceActors::TestPoolFetcher >> TLocksTest::Range_BrokenLockMax >> TObjectStorageListingTest::Listing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:50.881943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:50.882028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:37:50.882085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:50.882128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:50.882170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:50.882200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:50.882266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:50.882343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:50.883192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:50.883467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:51.004084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:51.004160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:51.005035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:51.015926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:51.016665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:51.016840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:51.030023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:51.030382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:51.031120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:51.031455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:51.035547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:51.035724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:51.036724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:51.036782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:51.036943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:51.037005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:51.037048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:51.037228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.043745Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:51.172766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:51.173030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.173250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:51.173311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:51.173550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:51.173607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:51.177950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:51.178157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:51.178378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.178461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:51.178507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:51.178553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:51.181709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.181778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:51.181828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:51.183901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.183984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:51.184058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:51.184113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:51.187812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:51.190173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:51.190377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:51.191546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:51.191697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:51.191747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:51.192002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:51.192054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:51.192215Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:51.192323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:51.194832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 3 2026-01-07T17:39:43.362717Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:39:43.362750Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:39:43.362780Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:39:43.362807Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-07T17:39:43.362834Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-07T17:39:43.364295Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.364392Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.364432Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:43.364468Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:39:43.364511Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:39:43.366124Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.366216Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.366257Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:43.366293Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:39:43.366332Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:39:43.367292Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.367377Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.367411Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:43.367443Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-07T17:39:43.367479Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:39:43.369281Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.369371Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:43.369430Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:43.369465Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-07T17:39:43.369504Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-07T17:39:43.369580Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-07T17:39:43.370705Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:43.372633Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:43.372758Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:43.374340Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:39:43.375855Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-07T17:39:43.375900Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:39:43.377785Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:39:43.377879Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.377918Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2694:4682] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:39:43.379310Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:39:43.379350Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:39:43.379431Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:39:43.379459Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:39:43.379519Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:39:43.379546Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:39:43.379604Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:39:43.379634Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-07T17:39:43.379696Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:39:43.379724Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:39:43.381723Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-07T17:39:43.381873Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.381912Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2697:4685] 2026-01-07T17:39:43.382392Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:39:43.382567Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:39:43.382640Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.382675Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2697:4685] 2026-01-07T17:39:43.382825Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:39:43.382894Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.382926Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2697:4685] 2026-01-07T17:39:43.383096Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:39:43.383222Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.383263Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2697:4685] 2026-01-07T17:39:43.383418Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:39:43.383450Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2697:4685] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> KqpWorkloadService::TestQueueSizeSimple |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer [GOOD] >> BasicUsage::ConflictingWrites >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin >> DefaultPoolSettings::TestResourcePoolsSysViewFilters >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:40.361875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:40.483419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:40.493864Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:40.495003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:40.495451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:40.495649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:40.497819Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:40.498162Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:40.498381Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:40.498543Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:40.879397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:40.982597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:40.982754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:40.983540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:40.983675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:41.028086Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:41.028732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:41.029043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:41.119826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:41.211732Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:41.807230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:41.876025Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:41.876136Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:42.198332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:42.252054Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:42.255581Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:42.255844Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1623:2399] 2026-01-07T17:39:42.256042Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:42.296797Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1597:2386], Recipient [2:1623:2399]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:42.301505Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:42.301597Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:42.302894Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:42.302947Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:42.302997Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:42.303272Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:42.303396Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:42.303444Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1647:2399] in generation 1 2026-01-07T17:39:42.305983Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:42.331869Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:42.331998Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:42.332073Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1651:2416] 2026-01-07T17:39:42.332109Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:42.332133Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:42.332158Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:42.332385Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1623:2399], Recipient [2:1623:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:42.332421Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:42.332653Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:42.332728Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:42.332863Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:42.332891Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:42.332959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:42.332993Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:42.333016Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:42.333046Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:42.333082Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:42.333150Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1620:2397], Recipient [2:1623:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:42.333187Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:42.333219Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1606:3056], serverId# [2:1620:2397], sessionId# [0:0:0] 2026-01-07T17:39:42.333506Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:827:2464], Recipient [2:1620:2397] 2026-01-07T17:39:42.333533Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:42.333598Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:42.333743Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:42.333782Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:42.333841Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T17:39:42.333887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:42.333915Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:42.333941Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:42.333973Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:42.334180Z node 2 :TX_DATASHARD TRACE: datashard_pipelin ... iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:45.109693Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:45.110476Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2007:2455], Recipient [2:1623:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:45.110527Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-07T17:39:45.265604Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2023:2456], Recipient [2:1623:2399]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-07T17:39:45.266137Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:45.266220Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:45.266313Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:45.266355Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:45.266391Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:45.266426Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:45.266476Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:39:45.266517Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:45.266547Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:45.266571Z node 2 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:45.266594Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:45.266716Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:45.267142Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:39:45.267202Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:39:45.267250Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2023:2456], 0} after executionsCount# 1 2026-01-07T17:39:45.267295Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2023:2456], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:45.267371Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2023:2456], 0} finished in read 2026-01-07T17:39:45.267434Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:45.267461Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:45.267485Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:45.267513Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:45.267561Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:39:45.267582Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:45.267606Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:39:45.267643Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:45.267743Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:45.268781Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2023:2456], Recipient [2:1623:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:45.268846Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 
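The datashard trace around this point walks a single read operation through a fixed sequence of execution units (CheckRead, BuildAndWaitDependencies, ExecuteRead, CompletedOperations), advancing the plan only while each unit reports Executed and finishing with "Execution plan ... has finished". The following is a minimal illustrative C++ sketch of that pipeline pattern only; it is not YDB's actual TExecutionUnit code. The unit names are copied from the trace, everything else (types, statuses, printed text) is assumed for illustration.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, Restart };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

int main() {
    // Unit names taken from the trace; the bodies are stand-ins that always succeed.
    std::vector<TUnit> plan = {
        {"CheckRead",                [] { return EStatus::Executed; }},
        {"BuildAndWaitDependencies", [] { return EStatus::Executed; }},
        {"ExecuteRead",              [] { return EStatus::Executed; }},
        {"CompletedOperations",      [] { return EStatus::Executed; }},
    };
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        if (unit.Run() != EStatus::Executed) {
            // A real unit may also ask to restart or wait; the sketch just stops here.
            std::cout << "unit " << unit.Name << " did not complete\n";
            return 1;
        }
        std::cout << "advance execution plan past unit " << unit.Name << "\n";
    }
    std::cout << "execution plan has finished\n";
    return 0;
}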
2026-01-07T17:39:45.466919Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2045:2457], Recipient [2:1623:2399]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-07T17:39:45.467271Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:39:45.467353Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2026-01-07T17:39:45.467462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-07T17:39:45.467505Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:39:45.467549Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:45.467593Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:45.467639Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2026-01-07T17:39:45.467680Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-07T17:39:45.467706Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:45.467732Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:39:45.467756Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:39:45.467886Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T17:39:45.468171Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:39:45.468234Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-07T17:39:45.468282Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2045:2457], 0} after executionsCount# 1 2026-01-07T17:39:45.468327Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2045:2457], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:39:45.468407Z node 2 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2045:2457], 0} finished in read 2026-01-07T17:39:45.468493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-07T17:39:45.468536Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:39:45.468569Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:45.468600Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:45.468648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-07T17:39:45.468692Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:45.468740Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 72075186224037888 has finished 2026-01-07T17:39:45.468786Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:39:45.468882Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:39:45.469788Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2045:2457], Recipient [2:1623:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:39:45.469840Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:39:45.470706Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:261:2140], Recipient [2:1623:2399]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query [GOOD] >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> EraseRowsTests::EraseRowsShouldSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:49.124262Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:49.124359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.124407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:49.124441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:49.124485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:49.124509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:49.124594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.124660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:49.125524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.125826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:49.232575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:49.232649Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:49.233532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.244447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:49.245050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:49.245240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:49.254728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:49.254998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:49.255568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.255901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:49.260213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.260387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:49.261427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:49.261489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.261631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:49.261676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:49.261729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:49.261882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.268183Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:49.398124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:49.398391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.398656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:49.398721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:49.398975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:49.399058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:49.403164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.403384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
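The schemeshard traces in this output (the txId 202 sequence earlier, and the acks that follow below) show the same publication bookkeeping: a transaction publishes several paths to the scheme board, each TEvUpdateAck lowers the in-flight count (4, 3, 2, 1), and only the last ack produces "Publication complete, notify & remove". Below is a minimal sketch of that countdown, assuming a simple set-based model rather than schemeshard's real data structures; only the path ids and log wording are taken from the trace.

#include <cstdint>
#include <initializer_list>
#include <iostream>
#include <set>
#include <utility>

using TPathId = std::pair<uint64_t, uint64_t>; // (OwnerId, LocalPathId)

struct TPublication {
    std::set<TPathId> InFlight;

    // Logs the remaining count (as the trace does), removes the acked path,
    // and reports whether this was the last outstanding path.
    bool Ack(const TPathId& pathId) {
        std::cout << "Publication in-flight, count: " << InFlight.size() << "\n";
        InFlight.erase(pathId);
        return InFlight.empty();
    }
};

int main() {
    const uint64_t owner = 72057594046678944ULL; // schemeshard tablet id seen in the trace
    TPublication pub{{{owner, 127}, {owner, 128}, {owner, 129}, {owner, 130}}};
    for (uint64_t localPathId : {127u, 128u, 129u, 130u}) {
        if (pub.Ack({owner, localPathId})) {
            std::cout << "Publication complete, notify & remove\n";
        }
    }
    return 0;
}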
2026-01-07T17:37:49.403590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.403655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:49.403694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:49.403725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:49.405776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.405847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:49.405933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:49.407866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.407918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.407977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.408028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:49.410812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:49.412857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:49.413100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:49.414297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.414457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:49.414505Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.414780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:49.414836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.414997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:49.415092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:49.417450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 3 2026-01-07T17:39:46.471631Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:39:46.471665Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:39:46.471698Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:39:46.471724Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-07T17:39:46.471751Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-07T17:39:46.473414Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.473529Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.473570Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:46.473608Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:39:46.473655Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:39:46.475622Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.475735Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.475776Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:46.475814Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:39:46.475860Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:39:46.476996Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.477097Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.477135Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:46.477170Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-07T17:39:46.477209Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:39:46.479373Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.479467Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:39:46.479503Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:39:46.479537Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-07T17:39:46.479574Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-07T17:39:46.479651Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-07T17:39:46.480800Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:46.487895Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:46.488085Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:39:46.490196Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:39:46.492026Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-07T17:39:46.492077Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:39:46.494145Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:39:46.494263Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.494308Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2694:4682] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:39:46.495809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:39:46.495857Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:39:46.495938Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:39:46.495965Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:39:46.496025Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:39:46.496050Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:39:46.496106Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:39:46.496132Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-07T17:39:46.496190Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:39:46.496216Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:39:46.499231Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-07T17:39:46.499405Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.499453Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2697:4685] 2026-01-07T17:39:46.499989Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:39:46.500175Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:39:46.500256Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.500290Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2697:4685] 2026-01-07T17:39:46.500469Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:39:46.500569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.500602Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2697:4685] 2026-01-07T17:39:46.500750Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:39:46.500838Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.500872Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2697:4685] 2026-01-07T17:39:46.501055Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:39:46.501089Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2697:4685] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:46.558953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:46.672953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:46.690889Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:46.691419Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:46.691490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:46.975917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:46.976057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:47.049167Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807583867292 != 1767807583867296 2026-01-07T17:39:47.060425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:47.108597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:47.202442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:47.523499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:47.545577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:47.688754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:47.773085Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008060s 2026-01-07T17:39:48.429495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:48.429630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1081:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:48.429721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:48.430738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:48.430993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:48.451761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:48.624591Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:39:48.709883Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:49.242705Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrp62b9vrk9v7m25e2n6y8. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OGUyNmIzYzktNDhlNTg2ZDUtZDJlN2U2NDQtYTM0ZDdhODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2026-01-07T17:39:49.245567Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrp62b9vrk9v7m25e2n6y8. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OGUyNmIzYzktNDhlNTg2ZDUtZDJlN2U2NDQtYTM0ZDdhODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2026-01-07T17:39:49.249988Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1173:2965], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrp62b9vrk9v7m25e2n6y8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OGUyNmIzYzktNDhlNTg2ZDUtZDJlN2U2NDQtYTM0ZDdhODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2026-01-07T17:39:49.259228Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGUyNmIzYzktNDhlNTg2ZDUtZDJlN2U2NDQtYTM0ZDdhODY=, ActorId: [1:1068:2895], ActorState: ExecuteState, LegacyTraceId: 01kecrp62b9vrk9v7m25e2n6y8, Create QueryResponse for error on request, msg: status# INTERNAL_ERROR issues# { message: "Read from column index 1: got NULL from NOT NULL column" issue_code: 2012 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin >> TxUsage::WriteToTopic_Demo_47_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:38:31.406338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:38:31.406449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:31.406501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:38:31.406535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:38:31.406595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:38:31.406630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:38:31.406700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:31.406802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:38:31.407757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:38:31.408083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:38:31.502672Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:38:31.502733Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:31.530950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:38:31.531318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:38:31.531500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:38:31.569790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:38:31.570294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:38:31.571093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:38:31.571424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:38:31.593859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:31.594092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:38:31.595368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:31.595450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:31.595593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:38:31.595652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:31.595756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:38:31.595969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:38:31.744963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.745998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746136Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.746949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.747027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:31.747111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-07T17:39:50.373863Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:736:2717]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:39:50.374061Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-07T17:39:50.374500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:736:2717], Recipient [3:129:2153]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 32 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 136 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-07T17:39:50.374550Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:39:50.374607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0032 2026-01-07T17:39:50.374722Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:39:50.374765Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:39:50.385310Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:739:2718]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-07T17:39:50.385406Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T17:39:50.385527Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 outdated step 5000038 last cleanup 0 2026-01-07T17:39:50.385604Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 
2026-01-07T17:39:50.385641Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2026-01-07T17:39:50.385677Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2026-01-07T17:39:50.385717Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2026-01-07T17:39:50.385893Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:739:2718]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:39:50.386043Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 38 2026-01-07T17:39:50.386432Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:739:2718], Recipient [3:129:2153]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 136 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-07T17:39:50.386482Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:39:50.386535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0028 2026-01-07T17:39:50.386687Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:39:50.444368Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:39:50.444457Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:39:50.444507Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-07T17:39:50.444605Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, 
queue# 2 2026-01-07T17:39:50.444643Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-07T17:39:50.444768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-07T17:39:50.444847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-07T17:39:50.444884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-07T17:39:50.444991Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2026-01-07T17:39:50.445066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 2 out of 2 partitions 2026-01-07T17:39:50.445120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-07T17:39:50.445174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:39:50.445204Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-07T17:39:50.445247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-07T17:39:50.445277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409547 by load, its table already has 2 out of 2 partitions 2026-01-07T17:39:50.445364Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:39:50.461276Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:39:50.461359Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:39:50.461405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:39:50.487769Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1743:3661], Recipient [3:736:2717]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.487864Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.487923Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409546, clientId# [3:1742:3660], serverId# [3:1743:3661], sessionId# [0:0:0] 2026-01-07T17:39:50.488293Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1741:3659], Recipient [3:736:2717]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } 2026-01-07T17:39:50.502424Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1746:3664], Recipient [3:739:2718]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.502496Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.502555Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409547, clientId# [3:1745:3663], serverId# [3:1746:3664], sessionId# [0:0:0] 2026-01-07T17:39:50.502782Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1744:3662], Recipient [3:739:2718]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table >> Cdc::AddStream [GOOD] >> Cdc::DisableStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:42.011728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:42.111097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:42.128350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:42.128882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:42.128979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:42.405670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:42.405756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:42.462124Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807579360672 != 1767807579360676 2026-01-07T17:39:42.471018Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:42.513619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:42.607153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:42.883918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:42.897312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:42.999923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:43.028696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:43.029436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:43.029663Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:43.029829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:43.063473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:43.064235Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:43.064347Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:43.065989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:43.066065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:43.066139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:43.066476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:43.066608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:43.066702Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:43.077450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:43.106798Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:43.106994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:43.107108Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:43.107148Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:43.107186Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:43.107224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:43.107440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:43.107500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:43.107842Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:43.107929Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:43.108000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:43.108036Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:43.108088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:43.108125Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:43.108161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:43.108192Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:43.108229Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:43.108359Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:43.108397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:43.108454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:43.108856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:43.108905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:43.109042Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:43.109338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:43.109404Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:43.109489Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:43.109545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:43.109583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:43.109629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:43.109687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:43.109975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:43.110010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:43.110044Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:43.110075Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:43.110132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:43.110160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:43.110197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:43.110230Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:43.110254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:43.111791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:43.111856Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:43.122620Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... aiting for: CA [2:1138:2834], trace_id# 2026-01-07T17:39:50.095412Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1138:2834], trace_id# 2026-01-07T17:39:50.095479Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:39:50.096533Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1138:2834] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:39:50.096600Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1138:2834], trace_id# 2026-01-07T17:39:50.096675Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1138:2834], trace_id# 2026-01-07T17:39:50.098123Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1138:2834] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 871 Tasks { TaskId: 1 CpuTimeUs: 389 FinishTimeMs: 1767807590097 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 22 BuildCpuTimeUs: 367 HostName: "ghrun-z2c6jslqle" NodeId: 2 CreateTimeMs: 1767807590095 UpdateTimeMs: 1767807590097 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:39:50.098195Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1138:2834] 2026-01-07T17:39:50.098285Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1134:2834] trace_id# 2026-01-07T17:39:50.098358Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000871s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:39:50.098851Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [2:1140:2834], Recipient [2:892:2768]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-07T17:39:50.098906Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-07T17:39:50.099029Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [2:892:2768], Recipient [2:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:50.099061Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T17:39:50.099125Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-07T17:39:50.099282Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-07T17:39:50.099367Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 5) 2026-01-07T17:39:50.099415Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 5) table: [72057594046644480:38:1] 2026-01-07T17:39:50.099500Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckWrite 2026-01-07T17:39:50.099553Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:50.099587Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckWrite 2026-01-07T17:39:50.099622Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:39:50.099656Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:39:50.099694Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:39:50.099758Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-07T17:39:50.099799Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:50.099825Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:39:50.099873Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:39:50.099902Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:39:50.099928Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:50.099949Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:39:50.099967Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T17:39:50.099987Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:39:50.100019Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-07T17:39:50.100073Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-07T17:39:50.100203Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-07T17:39:50.100286Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:39:50.100358Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:50.100389Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:39:50.100424Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit 
FinishProposeWrite 2026-01-07T17:39:50.100457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:50.100558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:50.100618Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:39:50.100654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:39:50.100686Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:39:50.100736Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:39:50.100760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:39:50.100787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-07T17:39:50.113317Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:39:50.113430Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:39:50.113489Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 4 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:39:50.113604Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:50.114177Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1135:2834] TxId: 281474976715662. Ctx: { TraceId: 01kecrp7ma8167rac302s4zg9g, Database: , SessionId: ydb://session/3?node_id=2&id=YzVhYTg2OTQtZWU2MTNhODUtNjA4YjAxZmItNDg3NjJmNjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Terminate execution trace_id# |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system >> TxUsage::WriteToTopic_Demo_47_Query >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TxUsage::WriteToTopic_Demo_38_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |87.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:39:33.843209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:33.920326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:33.936370Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:33.936735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:33.936789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:34.191171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:34.191302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:34.254202Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807571449595 != 1767807571449599 2026-01-07T17:39:34.263545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:34.307658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:34.404306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:34.683277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:34.696698Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:34.798746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:35.068633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-07T17:39:35.292722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.292942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.293068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.294270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.294496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:35.299338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:35.444253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:35.545077Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } ... waiting for SysViewsRoster update finished 2026-01-07T17:39:40.559168Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:40.588133Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:40.595444Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:40.595847Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:40.596035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:40.829834Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:40.829942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:40.850740Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807577513551 != 1767807577513555 2026-01-07T17:39:40.854418Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:40.898536Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:40.967733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:41.279269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:41.291926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:41.395414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.629885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:41.844911Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.845041Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.845416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.846028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.846172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:41.850643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:42.034055Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:39:42.069467Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1097:2913] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for at least 2 blocked commits 2026-01-07T17:39:44.386860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:39:44.386923Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037889 ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table [GOOD] >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:39:51.282762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:51.392042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:51.410682Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:51.411159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:51.411217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:51.712434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:51.712595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:51.802681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807588497455 != 1767807588497459 2026-01-07T17:39:51.814274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:51.862144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:52.078547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:52.528472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:52.549513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:52.675973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.737930Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:52.738195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:52.838577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:52.838761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:52.845954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:52.846086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:52.846150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:52.846578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:52.846737Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:52.846853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:52.861496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:52.981909Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:52.982123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:52.982235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:52.982270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:52.982303Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:52.982342Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:52.982888Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:52.983007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:52.983083Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:52.983132Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:52.983187Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:52.983231Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:52.983318Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:52.983772Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:52.984085Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:52.984214Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:52.989781Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:53.001025Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:53.001144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:53.147271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:39:53.153043Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:39:53.153133Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:53.153572Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:53.153617Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:53.153669Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:39:53.153950Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:39:53.154126Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:53.154327Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:53.154394Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:39:53.164208Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:53.164769Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:53.167171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:39:53.167223Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:53.168222Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:39:53.168294Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:53.176865Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:53.176948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:53.176995Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:39:53.177067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:39:53.177119Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:39:53.177196Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:53.182859Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:53.184564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:39:53.184730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:39:53.184808Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:59.618759Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:39:59.618996Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:39:59.619128Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:59.619642Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:59.619713Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:39:59.620134Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:59.620546Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:59.622446Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:39:59.622500Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:59.623305Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:39:59.623380Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:59.624025Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:59.624067Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:59.624110Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:39:59.624168Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:39:59.624220Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:39:59.624295Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:59.625929Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:59.627474Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:39:59.627543Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:39:59.628294Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:39:59.636434Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:59.636537Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:59.636602Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:59.637511Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:59.637686Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:59.641688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:59.647699Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:59.788864Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:59.796675Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:39:59.843157Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:59.941563Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-07T17:39:59.941912Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:39:59.942044Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-07T17:39:59.952989Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:59.956746Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:39:59.957589Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:39:59.969560Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:39:59.969640Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:59.969880Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:39:59.969926Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:39:59.970028Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:59.970072Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:59.970118Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:59.970180Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:59.970452Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:39:59.971350Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:59.971727Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:59.971927Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:59.971974Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:39:59.972031Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:39:59.972266Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:39:59.972330Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:59.973850Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:39:59.974088Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:39:59.974228Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:39:59.974279Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:00.004068Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:00.004140Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:00.004303Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:00.004341Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:00.004382Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:00.004534Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:00.004597Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:00.004645Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> TLocksTest::SetEraseSet [GOOD] |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:38:35.675682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:38:35.675760Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:35.675800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:38:35.675843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:38:35.675891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:38:35.675917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:38:35.675975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:35.676059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:38:35.676825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:38:35.677155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:38:35.760701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:38:35.760764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:35.776201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:38:35.776544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:38:35.781167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:38:35.788342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:38:35.788631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:38:35.789222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:38:35.789447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:38:35.791452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:35.791616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:38:35.792615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:35.792675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-07T17:38:35.792797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:38:35.792851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:35.792959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:38:35.793132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:38:35.920152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.923720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.923890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.923985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924214Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:35.924785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... onditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-07T17:39:59.920515Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-07T17:39:59.920716Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:761:2738]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-07T17:39:59.920768Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2026-01-07T17:40:00.015499Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1314:3198]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:00.015594Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:00.015722Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 300 last cleanup 0 2026-01-07T17:40:00.015811Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:00.015856Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2026-01-07T17:40:00.015900Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2026-01-07T17:40:00.015938Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2026-01-07T17:40:00.016103Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1319:3201]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:00.016140Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:00.016214Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 300 last cleanup 0 2026-01-07T17:40:00.016262Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:00.016290Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2026-01-07T17:40:00.016354Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2026-01-07T17:40:00.016420Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2026-01-07T17:40:00.016560Z 
node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1314:3198]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:40:00.016728Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 32 2026-01-07T17:40:00.016823Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1319:3201]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:40:00.016899Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 32 2026-01-07T17:40:00.017329Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1314:3198], Recipient [3:1726:3583]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 32 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 224 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-07T17:40:00.017389Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:40:00.017456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0032 2026-01-07T17:40:00.017579Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:40:00.017639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:40:00.017918Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1319:3201], Recipient [3:1726:3583]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 22 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 224 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-07T17:40:00.017962Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:40:00.018016Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0022 2026-01-07T17:40:00.018136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:40:00.041531Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:1726:3583]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:00.041613Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:00.041704Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:1726:3583], Recipient [3:1726:3583]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:00.041736Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:00.053393Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:1726:3583]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2026-01-07T17:40:00.053496Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5308: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2026-01-07T17:40:00.053531Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-07T17:40:00.053624Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-07T17:40:00.053691Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-07T17:40:00.053824Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:1726:3583]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-07T17:40:00.053857Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, processing 
event TEvPrivate::TEvSendBaseStatsToSA 2026-01-07T17:40:00.054195Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269746180, Sender [3:2830:4646], Recipient [3:1726:3583]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-07T17:40:00.054243Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5455: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-07T17:40:00.094252Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2833:4649], Recipient [3:1314:3198]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:00.094351Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:00.094486Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409552, clientId# [3:2832:4648], serverId# [3:2833:4649], sessionId# [0:0:0] 2026-01-07T17:40:00.094763Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2831:4647], Recipient [3:1314:3198]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-01-07T17:40:00.095744Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2836:4652], Recipient [3:1319:3201]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:00.095790Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:00.095828Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409553, clientId# [3:2835:4651], serverId# [3:2836:4652], sessionId# [0:0:0] 2026-01-07T17:40:00.095995Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2834:4650], Recipient [3:1319:3201]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> TxUsage::WriteToTopic_Demo_38_Query >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2026-01-07T17:39:03.759526Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1767807543759487 2026-01-07T17:39:03.959862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675584588660128:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:03.959910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:03.992340Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:03.993043Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675583525310450:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:03.993313Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.001326Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.150150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.151804Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.212842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.212964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.213596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.213679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.252955Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:04.253170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.259193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.296814Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:04.328067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:04.375556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:04.417927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001aef/r3tmp/yandexDp53TA.tmp 2026-01-07T17:39:04.417981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/ec9d/001aef/r3tmp/yandexDp53TA.tmp 2026-01-07T17:39:04.418167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001aef/r3tmp/yandexDp53TA.tmp 2026-01-07T17:39:04.418276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:04.455409Z INFO: TTestServer started on Port 16986 GrpcPort 4125 PQClient connected to localhost:4125 2026-01-07T17:39:04.628811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:04.970038Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:04.995962Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:07.078497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601768530584:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.079836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.080133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601768530597:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.080181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601768530598:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.080373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.084245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:07.117095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675601768530601:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:39:07.459765Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675601768530680:2937] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:07.461572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:07.470930Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675600705179870:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.471489Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MzZiZTE0ZTctZDdiNDRmYWYtOGFkZTMzMDgtMWRkOTUwYTA=, ActorId: [2:7592675600705179832:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmxw5d0yt403nm1c4s5fg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:07.476470Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:07.483220Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675601768530732:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.483587Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2YwZGJjNmEtNWZiN2I3ODItNGMxNTUyMDYtNTBlMDlhNTM=, ActorId: [1:7592675601768530575:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmxp42x7wtswa4t8bxtja, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:07.483999Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:07 ... :2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:58.709205Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:58.709218Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:58.808744Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:58.808778Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:58.808794Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:58.808818Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:58.808833Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:58.913116Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:58.913152Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:58.913168Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:58.913212Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T17:39:58.913226Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.021080Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.021119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.021135Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.021157Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.021171Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.123223Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.123258Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.123274Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.123296Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.123309Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.225038Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.225073Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.225089Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.225111Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.225123Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.329039Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.329077Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.329091Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.329109Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.329121Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.431251Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.431294Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 
0 2026-01-07T17:39:59.431309Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.431330Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.431343Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.532520Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.532560Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.532575Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.532593Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.532605Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.633608Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.633643Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.633657Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.633676Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.633689Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.688828Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:20229" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:20229" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:20229" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } === Closing the session 2026-01-07T17:39:59.708949Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2026-01-07T17:39:59.714210Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2026-01-07T17:39:59.719769Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2026-01-07T17:39:59.719839Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2026-01-07T17:39:59.719908Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2026-01-07T17:39:59.720091Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2026-01-07T17:39:59.720147Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2026-01-07T17:39:59.737063Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.737103Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.737117Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.737136Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.737148Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.845057Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.845093Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.845106Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.845125Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.845138Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:39:59.949246Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:39:59.949287Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.949302Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:39:59.949323Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:39:59.949335Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:00.057066Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:00.057102Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:00.057114Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:00.057133Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:00.057144Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist |87.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2026-01-07T17:39:47.428799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675773148982069:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.428851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.845770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:47.928690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:47.928770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:47.981602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:48.070169Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:48.116014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.429200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.548065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.557917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:48.699539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:48.941855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:49.005732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:49.070842Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2026-01-07T17:39:49.071272Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7592675781738917536:2703] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:39:49.071369Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7592675781738917536:2703] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:39:49.071414Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7592675781738917536:2703] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-07T17:39:49.074918Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-07T17:39:49.075184Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7592675781738917558:2710] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:39:49.075247Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7592675781738917558:2710] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:39:49.075289Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7592675781738917558:2710] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-07T17:39:49.078728Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2026-01-07T17:39:49.078991Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7592675781738917565:2714] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:39:49.079058Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7592675781738917565:2714] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:39:49.079079Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7592675781738917565:2714] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-07T17:39:49.082179Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2026-01-07T17:39:49.082413Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7592675781738917571:2717] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:39:49.082477Z node 1 
:TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7592675781738917571:2717] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:39:49.082500Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7592675781738917571:2717] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-07T17:39:52.025658Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675791293500468:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.055949Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:52.117079Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:52.250501Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:52.255881Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675791293500404:2081] 1767807591977504 != 1767807591977507 2026-01-07T17:39:52.276444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:52.276550Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:52.312963Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:52.325931Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:52.608163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:52.639735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.734429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.804511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:57.323891Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:57.324637Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675817730860975:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:57.324684Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:57.605048Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:57.619861Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:57.629156Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592675817730860754:2081] 1767807597287277 != 1767807597287280 2026-01-07T17:39:57.664686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:57.664807Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:57.673608Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:57.863625Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:57.971925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:57.982770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:57.996998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:58.005240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.117647Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.216264Z 
node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.311347Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin >> DefaultPoolSettings::TestResourcePoolsSysViewFilters [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> Cdc::DisableStream [GOOD] >> Cdc::AwsRegion >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:55.455732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:55.585142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:55.618018Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:55.618592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:55.618655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:56.017390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:56.017555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:56.130437Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807591494744 != 1767807591494748 2026-01-07T17:39:56.150452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:56.212180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:56.328068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:56.723790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:56.745491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:56.879659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:56.940012Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:56.940378Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:56.999323Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:56.999482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:57.001278Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:57.001390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:57.001447Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:57.001869Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:57.002022Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:57.002126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:57.013553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:57.105387Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:57.105610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:57.105790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:57.105841Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:57.105879Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:57.105919Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:57.106411Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:57.106531Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:57.106615Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:57.106655Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:57.106722Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:57.106773Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:57.106866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:57.107373Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:57.107662Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:57.107774Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:57.118025Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:57.129643Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:57.129773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:57.267293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:39:57.282268Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:39:57.282376Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:57.282839Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:57.282895Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:57.282942Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:39:57.283321Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:39:57.283512Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:57.283710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:57.283784Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:39:57.288170Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:57.288735Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:57.291226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:39:57.291284Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:57.292314Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:39:57.292392Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:57.294140Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:57.294194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:57.294302Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:39:57.294384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:39:57.294451Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:39:57.294558Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:57.300907Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:57.302735Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:39:57.302883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:39:57.302939Z node 1 :TX_DATASHARD DEBUG: datashar ... hard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:04.301832Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:04.301868Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.302430Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:04.302544Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:04.302613Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:04.302662Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:04.302708Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:04.302752Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:04.303218Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:887:2766], sessionId# [0:0:0] 2026-01-07T17:40:04.303348Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:04.303590Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:04.303667Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:04.305356Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:04.317515Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:04.317656Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:04.442319Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:04.442950Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 
281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:04.443009Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.443563Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:04.443613Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:04.443664Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:04.443943Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:04.444080Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:04.444649Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:04.444720Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:04.445256Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:04.445668Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:04.447579Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:04.447632Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.448518Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:04.448597Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:04.449374Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:04.449423Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:04.449481Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:04.449541Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:04.449590Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:04.449673Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.452163Z node 2 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:04.453846Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:04.453925Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:04.454653Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:04.460545Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-07T17:40:04.460725Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.472366Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.472478Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.472821Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-07T17:40:04.474973Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-07T17:40:04.475154Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.475343Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.475394Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.475615Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-07T17:40:04.481056Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-07T17:40:04.481353Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.481571Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.481625Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.481863Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-07T17:40:04.484013Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-07T17:40:04.484203Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.484413Z node 2 :TX_DATASHARD 
INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.484463Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.484666Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-07T17:40:04.486707Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-07T17:40:04.486870Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.487153Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.487209Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.487420Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-07T17:40:04.489702Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] 2026-01-07T17:40:04.489821Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:04.490012Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:04.490056Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:04.490270Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |87.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table >> BasicUsage::SimpleHandlers [GOOD] 
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> TLocksFatTest::RangeSetNotBreak [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:59.004246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:59.143338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:59.162122Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:59.162718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:59.162794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:59.460477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:59.460635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:59.544006Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807595413007 != 1767807595413011 2026-01-07T17:39:59.555566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:59.610375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:59.724153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:00.277448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:00.291314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:00.434521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:00.536302Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:00.536597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:00.747252Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:00.747406Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:00.749297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:00.749401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:00.749473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:00.749855Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:00.750013Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:00.750101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:00.765531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:00.845357Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:00.845587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:00.845722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:00.845760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:00.845794Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:00.845826Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:00.846332Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:00.846470Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:00.846552Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:00.846604Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:00.846677Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:00.846721Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:00.846805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:00.847303Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:00.847590Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:00.847708Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:00.851744Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:00.865679Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:00.865818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:01.007346Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:01.026960Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:01.027064Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.027607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:01.027661Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:01.027711Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:01.028031Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:01.028206Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:01.028440Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:01.028513Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:01.035027Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:01.035644Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:01.042635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:01.042717Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.043980Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:01.044072Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:01.053955Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:01.054035Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:01.054111Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:01.054189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:01.054247Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:01.054335Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.079768Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:01.084500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:01.084717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:01.084791Z node 1 :TX_DATASHARD DEBUG: datashar ... 6-01-07T17:40:06.796325Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:06.796498Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:07.075777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:07.081268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:07.110872Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807603588507 != 1767807603588511 2026-01-07T17:40:07.114498Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:07.160465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:07.245422Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:07.541910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:07.559341Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:07.665913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:07.688695Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-07T17:40:07.688878Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:07.733590Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:07.733711Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:07.734991Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:07.735073Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:07.735115Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:07.735474Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:07.735588Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:07.735652Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-07T17:40:07.746293Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:07.746382Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:07.746481Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:07.746561Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-07T17:40:07.746595Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:07.746626Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:07.746655Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:07.747005Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:07.747097Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 
2026-01-07T17:40:07.747417Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:07.747456Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:07.747491Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:07.747532Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:07.747587Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-07T17:40:07.747712Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:07.747889Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:07.747944Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:07.749409Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:07.760057Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:07.760170Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:07.887835Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:07.888553Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:07.888621Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:07.889675Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:07.889737Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:07.889789Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:07.890094Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:07.890249Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:07.891180Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:07.891257Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 
72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:07.891730Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:07.892211Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:07.894179Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:07.894284Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:07.894868Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:07.894948Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:07.896159Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:07.896207Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:07.896262Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:07.896333Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:07.896410Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:07.896504Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:07.897763Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:07.899644Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:07.899718Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:07.900483Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:07.906706Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-07T17:40:07.906841Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2026-01-07T17:40:07.907030Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:00.288468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:00.457924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:00.493887Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:00.494469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:00.494536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:01.134867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:01.135041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:01.325606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807596902997 != 1767807596903001 2026-01-07T17:40:01.338241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:01.390250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:01.567872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:01.993356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:02.009258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:02.129426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:02.197224Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:02.197561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:02.286652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:02.286819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:02.291475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:02.291627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:02.291705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:02.292188Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:02.292450Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:02.292596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:02.304475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:02.431705Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:02.431948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:02.432081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:02.432120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:02.432159Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:02.432197Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.432744Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:02.432874Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:02.432992Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.433039Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:02.433101Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:02.433143Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.433231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:02.433717Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:02.434022Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:02.434131Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:02.435959Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:02.449678Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:02.449818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:02.591263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:02.595914Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:02.596012Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.596532Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.596587Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:02.596639Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:02.605031Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:02.605342Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:02.605671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.605745Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:02.608093Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:02.608663Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:02.611269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:02.611332Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.612482Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:02.612576Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.617063Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.617137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:02.617194Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:02.617268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:02.617328Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:02.617485Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.623814Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:02.626215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:02.626417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:02.626502Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:08.227324Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:08.227623Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:08.227789Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:08.228671Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:08.228743Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:08.229223Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:08.229659Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:08.231431Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:08.231488Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:08.231985Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:08.232057Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:08.233253Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:08.233300Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:08.233354Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:08.233417Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:08.233474Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:40:08.233562Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:08.234697Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:08.236477Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:08.236547Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:08.237287Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:08.245900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:08.246014Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:08.246085Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:08.247021Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:08.247216Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:08.251464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:08.258350Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:08.382260Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:08.385730Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:08.421698Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:08.529628Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:08.530106Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:08.530281Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-07T17:40:08.541575Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:08.545712Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:08.546605Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:08.557697Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:08.557770Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:08.558072Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:08.558110Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:08.558226Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:08.558306Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:08.558357Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:08.558409Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:08.558467Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:08.559328Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:08.559674Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:08.559840Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:08.559878Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:08.559919Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:40:08.560086Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:08.560147Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:08.560770Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:08.561036Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:08.561174Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:08.561225Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:08.581177Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:08.581263Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:08.581713Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:08.581766Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:08.581810Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:08.581959Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:08.582035Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:08.582098Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2026-01-07T17:39:01.549815Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1767807541549780 2026-01-07T17:39:01.817320Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675575313746446:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:01.817688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:01.849810Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675577284424486:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:01.849851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:01.854314Z node 1 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:01.860791Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:02.050505Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:02.057048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:02.086185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:02.086316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:02.087107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:02.087171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:02.144056Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:02.144237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:02.145314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:02.178200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:02.250605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001b78/r3tmp/yandexUMya0d.tmp 2026-01-07T17:39:02.250630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001b78/r3tmp/yandexUMya0d.tmp 2026-01-07T17:39:02.250827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001b78/r3tmp/yandexUMya0d.tmp 2026-01-07T17:39:02.250935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:02.267125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:02.271805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:02.288266Z INFO: TTestServer started on Port 19471 GrpcPort 31753 PQClient connected to localhost:31753 2026-01-07T17:39:02.498182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:39:02.824653Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:02.858476Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:04.909387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590169326705:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.911709Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590169326697:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.911861Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.912362Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675590169326712:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.912442Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:04.917158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:04.950454Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675590169326711:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:39:05.213724Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675594464294037:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:05.242530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:05.247134Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675588198649636:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:05.248210Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675594464294052:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:05.247588Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTFmNmFiNDEtZGM4OTQzLTNjYzE1MDY4LTIyYTJiOGVh, ActorId: [1:7592675588198649607:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmvm02mh5c8bgg934e649, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:05.249759Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:05.249758Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTYxYWNiNzgtZjAyNTJmZWItNTQ1OTE1NzktM2ViZDJkNjU=, ActorId: [2:7592675590169326695:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmvjbe7f5e0w5st2nbexb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:05.249992Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:05 ... 
O: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_8405379213049982079_v1 grpc read failed 2026-01-07T17:40:07.280493Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_8405379213049982079_v1 grpc closed 2026-01-07T17:40:07.280505Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_8405379213049982079_v1 is DEAD 2026-01-07T17:40:07.281100Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:07.281930Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0] Write session is aborting and will not restart 2026-01-07T17:40:07.280970Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0 grpc read done: success: 0 data: 2026-01-07T17:40:07.280988Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0 grpc read failed 2026-01-07T17:40:07.281009Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0 grpc closed 2026-01-07T17:40:07.282069Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0] Write session: destroy 2026-01-07T17:40:07.281024Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|10c00669-b1077c84-68a9d1f2-cd54dd47_0 is DEAD 2026-01-07T17:40:07.281434Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:07.281546Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555675:2490] disconnected. 2026-01-07T17:40:07.281568Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555675:2490] disconnected; active server actors: 1 2026-01-07T17:40:07.281596Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555675:2490] client user disconnected session shared/user_3_2_5677381342315062835_v1 2026-01-07T17:40:07.281643Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-07T17:40:07.281693Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555686:2489] disconnected. 
2026-01-07T17:40:07.281703Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555686:2489] disconnected; active server actors: 1 2026-01-07T17:40:07.281715Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555686:2489] client user disconnected session shared/user_3_1_9898496880695745626_v1 2026-01-07T17:40:07.281759Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555678:2491] disconnected. 2026-01-07T17:40:07.281770Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555678:2491] disconnected; active server actors: 1 2026-01-07T17:40:07.281782Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592675857152555678:2491] client user disconnected session shared/user_3_3_8405379213049982079_v1 2026-01-07T17:40:07.282645Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_9898496880695745626_v1 2026-01-07T17:40:07.282696Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592675857152555704:2505] destroyed 2026-01-07T17:40:07.282726Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592675857152555722:2488] destroyed 2026-01-07T17:40:07.282758Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:40:07.282789Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:07.282806Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.282817Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:07.282834Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.282844Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:07.282908Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_9898496880695745626_v1 2026-01-07T17:40:07.284077Z :INFO: [/Root] [/Root] [5011cd5-7be3faf1-12b3fcb7-1b031f34] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:07.284143Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:07.284192Z :INFO: [/Root] [/Root] [5011cd5-7be3faf1-12b3fcb7-1b031f34] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1160 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.284229Z :INFO: [/Root] [/Root] [b3576d1-2243fa54-149b1bb5-eb65f7f0] Closing read session. 
Close timeout: 0.000000s 2026-01-07T17:40:07.284264Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2026-01-07T17:40:07.284290Z :INFO: [/Root] [/Root] [b3576d1-2243fa54-149b1bb5-eb65f7f0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1150 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.284315Z :INFO: [/Root] [/Root] [5a7c40de-8f6655c2-518ad1cb-cb65fb68] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:07.284340Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:07.284383Z :INFO: [/Root] [/Root] [5a7c40de-8f6655c2-518ad1cb-cb65fb68] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1147 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.284413Z :INFO: [/Root] [/Root] [5a7c40de-8f6655c2-518ad1cb-cb65fb68] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:07.284444Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:07.284485Z :INFO: [/Root] [/Root] [5a7c40de-8f6655c2-518ad1cb-cb65fb68] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1148 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.284574Z :NOTICE: [/Root] [/Root] [5a7c40de-8f6655c2-518ad1cb-cb65fb68] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:07.285530Z :INFO: [/Root] [/Root] [b3576d1-2243fa54-149b1bb5-eb65f7f0] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:07.285577Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2026-01-07T17:40:07.285610Z :INFO: [/Root] [/Root] [b3576d1-2243fa54-149b1bb5-eb65f7f0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1151 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.285663Z :NOTICE: [/Root] [/Root] [b3576d1-2243fa54-149b1bb5-eb65f7f0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:07.286124Z :INFO: [/Root] [/Root] [5011cd5-7be3faf1-12b3fcb7-1b031f34] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:07.286155Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:07.286183Z :INFO: [/Root] [/Root] [5011cd5-7be3faf1-12b3fcb7-1b031f34] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1162 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:07.286228Z :NOTICE: [/Root] [/Root] [5011cd5-7be3faf1-12b3fcb7-1b031f34] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:07.305040Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:07.305093Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.305118Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:07.305137Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.305150Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:07.408066Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:07.408097Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.408111Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:07.408128Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.408141Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:07.509058Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:07.509104Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.509118Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:07.509135Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:07.509147Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous |87.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 
2026-01-07T17:39:47.591411Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675773450863988:2223];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.591996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.605495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:48.037608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:48.037703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:48.088662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:48.134823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:48.137854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.140523Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675773450863799:2081] 1767807587524870 != 1767807587524873 2026-01-07T17:39:48.327540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.589230Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.700283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.705936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:48.725645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:48.862327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:49.056392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:49.155966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.590833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675773450863988:2223];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.590936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:59.403262Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675826024683771:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:59.405767Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:59.406423Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:59.515062Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:59.520010Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675826024683651:2081] 1767807599383668 != 1767807599383671 2026-01-07T17:39:59.532197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:59.533953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:59.534034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:59.560629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:59.766532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:59.777736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:59.803732Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:59.804129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at 
schemeshard: 72057594046644480 2026-01-07T17:39:59.812634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:59.957476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:00.023475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:00.404100Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:04.399117Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592675826024683771:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:04.399164Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:00.454562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:00.646893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:00.714995Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:00.715547Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:00.715617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:01.369847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:01.369994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:01.720266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807597059204 != 1767807597059208 2026-01-07T17:40:01.767026Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:01.858256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:01.987821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:02.517475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:02.533545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:02.657063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:02.704131Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:02.704477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:02.758204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:02.758389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:02.760415Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:02.760554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:02.760643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:02.761154Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:02.761320Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:02.761449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:02.777439Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:02.816025Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:02.816264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:02.816403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:02.816450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:02.816493Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:02.816534Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.817072Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:02.817231Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:02.817360Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.817418Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:02.817482Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:02.817529Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.817621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:02.818107Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:02.818394Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:02.818514Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:02.820442Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:02.831402Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:02.831539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:02.958287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:02.967481Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:02.967609Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.968207Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.968284Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:02.968374Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:02.968764Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:02.969009Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:02.969272Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:02.969374Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:02.971930Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:02.972556Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:02.975408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:02.975469Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.976799Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:02.976903Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.978363Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:02.978425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:02.978494Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:02.978571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:02.978641Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:02.978738Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:02.985546Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:02.987720Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:02.987942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:02.988014Z node 1 :TX_DATASHARD DEBUG: datashar ... ain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-01-07T17:40:09.947648Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-07T17:40:09.955670Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:10.000000Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1454:3223] 2026-01-07T17:40:10.000248Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.010332Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.010459Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.011896Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-07T17:40:10.011972Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-07T17:40:10.012021Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-07T17:40:10.012387Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.012537Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.012606Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [2:1469:3223] in generation 1 2026-01-07T17:40:10.037643Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.037729Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2026-01-07T17:40:10.037834Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.037926Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037894, actorId: [2:1471:3233] 2026-01-07T17:40:10.037965Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-07T17:40:10.037994Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2026-01-07T17:40:10.038026Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:40:10.038477Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2026-01-07T17:40:10.038573Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2026-01-07T17:40:10.038972Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-07T17:40:10.039015Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.039052Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2026-01-07T17:40:10.039090Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-07T17:40:10.039184Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1451:3221], serverId# [2:1456:3224], sessionId# [0:0:0] 2026-01-07T17:40:10.039419Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:40:10.039631Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2026-01-07T17:40:10.039706Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2026-01-07T17:40:10.040163Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-07T17:40:10.055059Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:40:10.055196Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:10.199491Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1478:3240], serverId# [2:1479:3241], sessionId# [0:0:0] 2026-01-07T17:40:10.200515Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715663 at step 5000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 5000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2026-01-07T17:40:10.200562Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:40:10.201218Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-07T17:40:10.201264Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:10.201312Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [5000:281474976715663] in PlanQueue unit at 72075186224037894 2026-01-07T17:40:10.201556Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037894 loaded tx from db 5000:281474976715663 keys extracted: 0 2026-01-07T17:40:10.201674Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:10.202337Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-07T17:40:10.202420Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 44] schema version# 1 2026-01-07T17:40:10.202806Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:10.203161Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.204585Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 4500 2026-01-07T17:40:10.204635Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:40:10.205513Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 5000} 2026-01-07T17:40:10.205577Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-07T17:40:10.206511Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-07T17:40:10.206714Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:40:10.206839Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-07T17:40:10.206911Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2026-01-07T17:40:10.206964Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-01-07T17:40:10.207045Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-07T17:40:10.207114Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:10.207537Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-07T17:40:10.207589Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-07T17:40:10.207632Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037894 2026-01-07T17:40:10.207686Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [5000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:400:2399], exec 
latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:10.207734Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2026-01-07T17:40:10.207802Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:40:10.208676Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 5000 2026-01-07T17:40:10.211678Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2026-01-07T17:40:10.211736Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-07T17:40:10.217557Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1507:3263], serverId# [2:1508:3264], sessionId# [0:0:0] 2026-01-07T17:40:10.217784Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1507:3263], serverId# [2:1508:3264], sessionId# [0:0:0] 2026-01-07T17:40:10.219136Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1512:3268], serverId# [2:1513:3269], sessionId# [0:0:0] 2026-01-07T17:40:10.219329Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1512:3268], serverId# [2:1513:3269], sessionId# [0:0:0] 2026-01-07T17:40:10.221044Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1517:3273], serverId# [2:1518:3274], sessionId# [0:0:0] 2026-01-07T17:40:10.221444Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1517:3273], serverId# [2:1518:3274], sessionId# [0:0:0] |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous >> DistributedEraseTests::ConditionalEraseRowsShouldErase |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:39:56.648243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:56.804701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:56.825433Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:56.826096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:56.826156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:57.154444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:57.154597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:57.293537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807592539358 != 1767807592539362 2026-01-07T17:39:57.306442Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:57.356708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:57.464483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:57.790559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:57.806488Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:57.928478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.002094Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:58.002389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:58.084171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:58.084339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:58.086228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:58.086383Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:58.086446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:58.087176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:58.087375Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:58.087486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:58.100875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:58.157126Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:58.157450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:58.157597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:58.157638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:58.157675Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:58.157713Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:58.158304Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:58.158483Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:58.158588Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:58.158627Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:58.158684Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:58.158733Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:58.158836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:58.159359Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:58.159672Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:58.159804Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:58.162042Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:58.172948Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:39:58.173071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:58.303763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:39:58.315281Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:39:58.315420Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:58.316014Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:58.316095Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:58.316152Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:39:58.316513Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:39:58.325102Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:39:58.325526Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:58.325603Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:39:58.328176Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:39:58.328788Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:58.335952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:39:58.336028Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:58.337895Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:39:58.338004Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:58.339274Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:39:58.339329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:58.339381Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:39:58.339448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:39:58.339508Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:39:58.339654Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:58.346925Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:58.348873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:39:58.350793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:39:58.350904Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:11.364689Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:11.365022Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:11.365186Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:11.366012Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:11.366136Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:11.366657Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:11.367122Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:11.373745Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:11.373827Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:11.375101Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:11.375204Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:11.376359Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:11.376415Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:11.376463Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:11.376525Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:11.376580Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:40:11.376702Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:11.386541Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:11.388744Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:11.389079Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:11.389162Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:11.406436Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:11.406548Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:11.407013Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:11.407653Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:11.408062Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:11.416785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:11.436095Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:11.576791Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:11.580027Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:11.616050Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:11.714875Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:11.715336Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:11.715536Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-07T17:40:11.726970Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:11.730777Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:11.731625Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:11.743105Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:11.743192Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:11.743442Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:11.743490Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:11.743613Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:11.743661Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:11.743734Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:11.743801Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:11.744113Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:11.745254Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:11.745636Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:11.745859Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:11.745915Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:11.745968Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:40:11.746223Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:11.746298Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:11.747025Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:11.747293Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:11.747441Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:11.747502Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:11.749938Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:11.750002Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:11.750171Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:11.750211Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:11.750250Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:11.750385Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:11.750450Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:11.750502Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TxUsage::ReadRuleGeneration >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query [GOOD] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:40:05.459018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:05.565664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:05.582359Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:05.582858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:05.582921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:05.868873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:05.869028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:05.940567Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807602296280 != 1767807602296284 2026-01-07T17:40:05.956070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:06.003063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:06.099702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:06.485994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:06.500461Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:06.628087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:06.684420Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:06.684729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:06.730524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:06.730711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:06.732487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:06.732623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:06.732683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:06.733095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:06.733283Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:06.733398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:06.746107Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:06.787886Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:06.788116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:06.788258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:06.788304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:06.788341Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:06.788393Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:06.796995Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:06.797227Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:06.797421Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:06.797480Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:06.797561Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:06.797619Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:06.797740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:06.798370Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:06.799021Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:06.799175Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:06.801294Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:06.812566Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:06.812702Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:06.938993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:06.943350Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:06.943432Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:06.943852Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:06.943901Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:06.943951Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:06.944231Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:06.944394Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:06.944569Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:06.944643Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:06.946727Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:06.947235Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:06.949510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:06.949565Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:06.950614Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:06.950686Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:06.951818Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:06.951865Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:06.951905Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:06.951965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:06.952012Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:06.952088Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:06.962355Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:06.964227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:06.964412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:06.964476Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:12.440798Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:12.448692Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:12.448981Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:12.449823Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.449920Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:12.450510Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:12.451007Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.462623Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:12.462709Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.463870Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:12.463980Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.464812Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.464870Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:12.468214Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:12.468419Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:12.468496Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:40:12.468598Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.471138Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:12.473333Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:12.473449Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:12.474416Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:12.484583Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:12.484699Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:12.484778Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:12.485909Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:12.486117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:12.490902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:12.498447Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:12.653875Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:12.660945Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:12.719561Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:12.944137Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:12.944561Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:12.944734Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:40:12.957560Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.961571Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:12.962476Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:12.973681Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:12.973771Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.973986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:12.974027Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:12.974118Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.974161Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.974212Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:12.974270Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.974512Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:12.975330Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:12.975651Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:12.975809Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.975850Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:12.975894Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:40:12.976101Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:12.976154Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.976705Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:12.980667Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:12.980918Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:12.981027Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:13.021872Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:13.021940Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:13.022111Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:13.022150Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:13.022189Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:13.022325Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:13.022392Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.022438Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table >> TxUsage::WriteToTopic_Demo_47_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table >> TFlatTest::RejectByPerRequestSize [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:39:59.287375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:59.415164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:59.438906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:59.439491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:59.439558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:59.748912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:59.753454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:59.846830Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807595567521 != 1767807595567525 2026-01-07T17:39:59.864204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:59.926025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:00.145692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:00.513601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:00.533879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:00.669925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:00.734778Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:00.735084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:00.814518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:00.814686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:00.816731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:00.816857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:00.816986Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:00.817461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:00.817632Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:00.817740Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:00.828684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:00.915336Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:00.915592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:00.915737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:00.915779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:00.915817Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:00.915861Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:00.916384Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:00.916554Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:00.916671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:00.916723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:00.916783Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:00.916829Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:00.921377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:00.922179Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:00.922496Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:00.922634Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:00.924712Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:00.937707Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:00.937845Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:01.075321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:01.080354Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:01.080466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.085069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:01.085168Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:01.085240Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:01.085560Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:01.085778Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:01.086041Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:01.086115Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:01.088439Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:01.093201Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:01.095776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:01.095845Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.110651Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:01.110775Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:01.112419Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:01.112487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:01.112535Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:01.112604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:01.112661Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:01.112796Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:01.128560Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:01.138963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:01.139214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:01.139290Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:13.210037Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:13.210369Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:13.210527Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:13.216337Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:13.216444Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:13.217038Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:13.217488Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:13.219865Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:13.219923Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:13.220986Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:13.221067Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.222165Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.222212Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:13.222289Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:13.222351Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:13.222405Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:40:13.222495Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:13.223691Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:13.225884Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:13.226120Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:13.226189Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:13.238274Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:13.238391Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:13.238816Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:13.239620Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:13.240042Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:13.245656Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:13.252833Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:13.373422Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:13.376088Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:13.411363Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:13.512580Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:13.513011Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:13.513219Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:40:13.524220Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:13.528890Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:13.530855Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:13.542315Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:13.542411Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:13.542719Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:13.542772Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:13.542880Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:13.542932Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:13.542985Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:13.543057Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.543406Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:13.544433Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:13.544867Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:13.545129Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:13.545185Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:13.545236Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:40:13.545459Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:13.545530Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.546350Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:13.546608Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:13.546740Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:13.546806Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:13.548827Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:13.548882Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:13.549049Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:13.549094Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:13.549134Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:13.549277Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:13.549337Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:13.549387Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> Cdc::AwsRegion [GOOD] >> TxUsage::Write_And_Read_Small_Messages_1 >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2026-01-07T17:39:47.429225Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675772540337701:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.429425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.887998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:47.888073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:47.932056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:47.980136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:47.983586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675772540337596:2081] 1767807587424730 != 1767807587424733 2026-01-07T17:39:47.989375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.304406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.320395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.441112Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.517669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.486986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675772540337701:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.487055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:59.523572Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002741 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-01-07T17:39:59.523734Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002741 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-01-07T17:39:59.524344Z node 1 :TX_PROXY ERROR: 
datareq.cpp:883: Actor# [1:7592675824079947034:3137] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-01-07T17:40:00.924013Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675831222102116:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:00.941652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:00.968992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:40:01.143866Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:01.148272Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:01.153238Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:01.181053Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675831222102074:2081] 1767807600913598 != 1767807600913601 2026-01-07T17:40:01.303498Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:01.359878Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:01.925395Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:01.963120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:01.970145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:40:01.985988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:40:01.993951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:05.918061Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592675831222102116:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:05.918127Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:10.117336Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002565 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-01-07T17:40:10.117481Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002565 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-01-07T17:40:10.119391Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7592675874171776910:3130] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-01-07T17:40:10.889479Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675872236545444:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:10.889567Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:10.929147Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:40:11.053748Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:11.071365Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:11.071483Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:11.081521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:11.179480Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:11.355022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:11.368811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:40:11.378077Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:11.907421Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:14.849346Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7592675872236545664:2106] Handle TEvProposeTransaction 2026-01-07T17:40:14.849386Z node 3 :TX_PROXY 
DEBUG: proxy_impl.cpp:237: actor# [3:7592675872236545664:2106] TxId# 281474976715700 ProcessProposeTransaction 2026-01-07T17:40:14.849455Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [3:7592675872236545664:2106] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7592675889416415893:2797] DataReq marker# P0 2026-01-07T17:40:14.849522Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7592675889416415893:2797] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2026-01-07T17:40:14.850126Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7592675889416415893:2797] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-01-07T17:40:14.850142Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7592675889416415893:2797] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-01-07T17:40:14.850177Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7592675889416415893:2797] txid# 281474976715700 SEND to# [3:7592675872236545679:2111] TSchemeCache with 2 scheme entries. DataReq marker# P2 2026-01-07T17:40:14.850303Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7592675889416415893:2797] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-07T17:40:14.851807Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7592675889416415893:2797] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-07T17:40:14.852119Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7592675889416415893:2797] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-07T17:40:14.852647Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:14.853931Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2026-01-07T17:40:14.854280Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:40:14.855388Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2026-01-07T17:40:14.857482Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:14.857562Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:40:14.857657Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7592675889416415893:2797] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2026-01-07T17:40:14.857686Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7592675889416415893:2797] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2026-01-07T17:40:14.857757Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7592675889416415893:2797] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2026-01-07T17:40:14.857808Z 
node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592675889416415893:2797] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2026-01-07T17:40:14.857948Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2026-01-07T17:40:14.858005Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2026-01-07T17:40:14.858505Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2026-01-07T17:40:14.858539Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TxUsage::WriteToTopic_Demo_38_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:35:44.155505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:44.260335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:44.280257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:44.280852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:44.280913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:44.669944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:44.670122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:44.917175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807340713078 != 1767807340713082 2026-01-07T17:35:44.951310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:45.018470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:45.153656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:45.606440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:45.620206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:45.740544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:45.795400Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:35:45.795718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:35:45.844104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:35:45.844263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:35:45.846650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:35:45.846743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:35:45.846803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:35:45.847189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:35:45.847354Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:35:45.847443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:35:45.861523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:35:45.917561Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:35:45.917795Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:35:45.917924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:35:45.917966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:45.918091Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:35:45.918136Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:45.918682Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:35:45.918785Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:35:45.918858Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:45.918900Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:45.918944Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:35:45.918985Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:45.919073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:35:45.919525Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:35:45.919828Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:35:45.919955Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:35:45.921941Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:45.933596Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:35:45.933710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:35:46.069428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:35:46.078662Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:35:46.078763Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:46.079264Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:46.079317Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:35:46.079394Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:35:46.079676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:35:46.079855Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:35:46.080052Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:35:46.080135Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:35:46.083299Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:35:46.083799Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:35:46.086353Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:35:46.086415Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:46.087542Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:35:46.087622Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:46.088681Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:35:46.088726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:35:46.088784Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:35:46.088873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:35:46.089036Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:35:46.089133Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:35:46.090600Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:903:2775][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2026-01-07T17:35:46.096965Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:35:46.098759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:35:46.098907Z node 1 :TX_ ... tion][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.267984Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.297818Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.297912Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.297955Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.297999Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.298037Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.298129Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.298161Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.298190Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.298222Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.298250Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.334979Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.335070Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.335111Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.335160Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.335198Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.335292Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.335329Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.335359Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.335391Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.335419Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.356332Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.356418Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.356458Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.356503Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.356539Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.356634Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.356669Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.356698Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.356730Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.356757Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.377682Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.377792Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.377831Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.377876Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.377911Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.378013Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.378045Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.378074Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.378104Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.378133Z node 24 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.399085Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.399151Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.399182Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.399217Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.399248Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:40:15.399335Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:15.399358Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.399381Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:15.399413Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:15.399441Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2026-01-07T17:40:15.421498Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:40:15.421583Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2026-01-07T17:40:15.421789Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:40:15.422407Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 427 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:40:15.422528Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-07T17:40:15.422718Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 427 accessed 1 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-07T17:40:15.422850Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-07T17:40:15.423042Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:40:15.423237Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:40:15.423562Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 407 from pos 0 cbcount 1 2026-01-07T17:40:15.424382Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2026-01-07T17:40:15.426627Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-07T17:40:15.426773Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2026-01-07T17:40:15.427805Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037891][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:40:15.428420Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037891][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 426 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:40:15.428527Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037891][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-07T17:40:15.428698Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 426 accessed 0 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-07T17:40:15.428893Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037891][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-07T17:40:15.429065Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:40:15.429388Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 406 from pos 0 cbcount 1 2026-01-07T17:40:15.430199Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:40:15.430434Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous >> TxUsage::WriteToTopic_Demo_40_Table >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system >> TObjectStorageListingTest::ManyDeletes [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> BasicUsage::ConflictingWrites [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> GroupWriteTest::SimpleRdma >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2026-01-07T17:39:47.525059Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675776068427474:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.525151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.931858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:47.937343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:47.937460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:48.009064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675776068427352:2081] 1767807587500530 != 1767807587500533 2026-01-07T17:39:48.012761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:48.015305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:48.125559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:48.125580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:48.125586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:48.125656Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:48.171564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.354948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.363956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:48.378734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:48.525306Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.546029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.512731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675776068427474:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.512810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:56.509993Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675812333223057:2070];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:39:56.510038Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:56.737202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:56.757123Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675812333223015:2081] 1767807596497358 != 1767807596497361 2026-01-07T17:39:56.775763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:56.775843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:56.777267Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:56.787595Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:57.005054Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:57.005628Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:57.005637Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:57.005646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:57.005731Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:57.363667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:57.393956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:57.409047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:57.610566Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; .2026-01-07T17:40:01.518072Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592675812333223057:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:01.518153Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; . 2026-01-07T17:40:10.870194Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:40:10.871279Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2026-01-07T17:40:10.871613Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-07T17:40:10.872270Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2026-01-07T17:40:10.872515Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2026-01-07T17:40:10.873178Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2026-01-07T17:40:10.873445Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-07T17:40:10.874143Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2026-01-07T17:40:10.874399Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-07T17:40:10.874404Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:40:10.874829Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2026-01-07T17:40:10.875363Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-07T17:40:10.884007Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1767807610919 at tablet 72075186224037892 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807610919 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-07T17:40:10.884059Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-07T17:40:10.884244Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:10.884274Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:10.884316Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807610919:281474976711500] in PlanQueue unit at 72075186224037892 2026-01-07T17:40:10.884378Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1767807610919:281474976711500 2026-01-07T17:40:10.886170Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711500 released its data 2026-01-07T17:40:10.886224Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:10.886725Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1767807610919 at tablet 72075186224037889 { Transactions { TxId: 
281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807610919 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-07T17:40:10.886752Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:10.886882Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:10.886905Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:10.886925Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807610919:281474976711500] in PlanQueue unit at 7207518622 ... G: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037890 got data tx from cache 1767807620089:281474976711911 2026-01-07T17:40:20.046508Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711911 at step 1767807620089 at tablet 72075186224037889 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807620089 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-07T17:40:20.046540Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:20.046655Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:20.046670Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:20.046689Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807620089:281474976711911] in PlanQueue unit at 72075186224037889 2026-01-07T17:40:20.046729Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037889 got data tx from cache 1767807620089:281474976711911 2026-01-07T17:40:20.048234Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-07T17:40:20.048282Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:20.048691Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711911 at step 1767807620089 at tablet 72075186224037892 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807620089 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-07T17:40:20.048708Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-07T17:40:20.048817Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:20.048832Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:20.048850Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807620089:281474976711911] in PlanQueue unit at 72075186224037892 2026-01-07T17:40:20.048879Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1767807620089:281474976711911 2026-01-07T17:40:20.050172Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: 
tx 281474976711911 released its data 2026-01-07T17:40:20.050197Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:20.050476Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711911 at step 1767807620089 at tablet 72075186224037891 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767807620089 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2026-01-07T17:40:20.050490Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:40:20.050577Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-07T17:40:20.050591Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:20.050606Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767807620089:281474976711911] in PlanQueue unit at 72075186224037891 2026-01-07T17:40:20.050634Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037891 got data tx from cache 1767807620089:281474976711911 2026-01-07T17:40:20.051577Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:20.055290Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-07T17:40:20.055338Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:20.055927Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:20.056770Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 2026-01-07T17:40:20.057192Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1767807620089} 2026-01-07T17:40:20.057299Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:20.058332Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-07T17:40:20.059605Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-07T17:40:20.059956Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:20.060511Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1767807620089} 2026-01-07T17:40:20.060610Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1767807620089} 2026-01-07T17:40:20.060634Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-07T17:40:20.060714Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807620089 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to 
client [2:7592675915412451811:5106], exec latency: 0 ms, propose latency: 9 ms 2026-01-07T17:40:20.060741Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:40:20.060994Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-07T17:40:20.061023Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:20.061578Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1767807620089} 2026-01-07T17:40:20.063189Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:20.064064Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-07T17:40:20.064205Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:20.065000Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:20.065021Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037889 restored its data 2026-01-07T17:40:20.065211Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:20.065894Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 2026-01-07T17:40:20.066592Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:20.067277Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:20.067324Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807620089 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7592675915412451811:5106], exec latency: 18 ms, propose latency: 21 ms 2026-01-07T17:40:20.067346Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:20.067883Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T17:40:20.067912Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807620089 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7592675915412451811:5106], exec latency: 17 ms, propose latency: 18 ms 2026-01-07T17:40:20.067926Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-07T17:40:20.069622Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:20.073325Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:20.073404Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767807620089 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send 
result to client [2:7592675915412451811:5106], exec latency: 22 ms, propose latency: 26 ms 2026-01-07T17:40:20.073437Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:20.094182Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:40:20.094449Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2026-01-07T17:40:20.101252Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:40:20.101446Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2026-01-07T17:40:20.105202Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:40:20.105568Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] Test command err: 2026-01-07T17:39:47.522113Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675772335953651:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.522172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.595135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:48.013684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:48.013781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:48.020249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:39:48.080141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:48.122685Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675772335953477:2081] 1767807587485438 != 1767807587485441 2026-01-07T17:39:48.126400Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:48.221698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:48.221730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:48.221737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:48.221818Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:48.311227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:48.537168Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.606217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.629866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:52.383732Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:39:52.385034Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675793810790831:2322], Start check tables existence, number paths: 2 2026-01-07T17:39:52.385775Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:39:52.385818Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:39:52.390917Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU= temp_dir_name# 6f8252c9-49b4-0e02-04eb-58897e6d96f1 trace_id# 2026-01-07T17:39:52.391394Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675793810790831:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:39:52.391442Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: 
[1:7592675793810790831:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:39:52.391465Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675793810790831:2322], Successfully finished 2026-01-07T17:39:52.395495Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:39:52.395711Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:39:52.401226Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:39:52.433066Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:39:52.437676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:52.441859Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:39:52.444100Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:39:52.458478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:39:52.521041Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:39:52.521046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675772335953651:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.521118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:52.570892Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675793810790911:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:52.571004Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675793810790860:2508], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:39:52.571342Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675793810790921:2548], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:39:52.572410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675793810790921:2548], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:39:52.596988Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:39:52.597114Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:39:52.597158Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:39:52.597202Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:39:52.597293Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=YWNkYzcwOWUtMjM3NGNiZTItNjBmZWM1NjYtOTc4NmIyZGU=, ActorId: [1:7592675793810790849:2325], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:39:53.439741Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675801388612135:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:53.449580Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:53.488355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:53.596283Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:53.665673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:53.694346Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:53.694443Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:53.702654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:53.854701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... P_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Sending CompileQuery request trace_id# 2026-01-07T17:40:20.815624Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) 2026-01-07T17:40:20.819236Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7592675915649316812:2335], DatabaseId: /Root, PoolId: default, Got watch notification 2026-01-07T17:40:20.819363Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:476: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7592675915649316812:2335], DatabaseId: /Root, PoolId: default, Pool config has changed, queue size: -1, in flight limit: -1 2026-01-07T17:40:20.823573Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:40:20.823750Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, TxInfo status# Committed kind# Pure 
total_duration# 10.565 server_duration# 10.485 queries_count# 2 trace_id# 2026-01-07T17:40:20.823819Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:40:20.823964Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:40:20.824013Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, EndCleanup is_final# true trace_id# 2026-01-07T17:40:20.824352Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: ExecuteState, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Sent query response back to proxy proxy_request_id# 5 proxy_id# [6:7592675898469446832:2199] trace_id# 2026-01-07T17:40:20.824391Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: unknown state, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:40:20.824589Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=NjYyYjUzNmYtMjViYWZjMzUtYjRkYzNlYTEtYTg1ZjdjMzg=, ActorId: [6:7592675915649316849:2343], ActorState: unknown state, LegacyTraceId: 01kecrq5n11pw4b58g6hdx2hb6, Session actor destroyed trace_id# 2026-01-07T17:40:20.827675Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI= temp_dir_name# f3fb5bf6-42bb-5766-22b3-18afed517685 trace_id# 2026-01-07T17:40:20.828140Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:20.828286Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ReadyState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# DROP RESOURCE POOL default; rpc_actor# [6:7592675915649316873:2600] database# Root database_id# /Root pool_id# default trace_id# 2026-01-07T17:40:20.828317Z node 6 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ReadyState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Request placed into pool from cache pool_id# default trace_id# 2026-01-07T17:40:20.828392Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Sending CompileQuery request trace_id# 2026-01-07T17:40:20.867800Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7592675915649316812:2335], DatabaseId: /Root, PoolId: default, Got delete notification 2026-01-07T17:40:20.867916Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:40:20.867959Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:40:20.867994Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7592675915649316888:2347], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:40:20.868411Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7592675915649316888:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:20.868493Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:20.871172Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:40:20.871326Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, TxInfo status# Committed kind# Pure total_duration# 10.144 server_duration# 10.066 queries_count# 2 trace_id# 2026-01-07T17:40:20.871397Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:40:20.871524Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:40:20.871555Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, EndCleanup is_final# true trace_id# 2026-01-07T17:40:20.871607Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Sent query response back to proxy proxy_request_id# 6 proxy_id# [6:7592675898469446832:2199] trace_id# 2026-01-07T17:40:20.871646Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: unknown state, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:40:20.871905Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=ZDI1MmUzYjAtYzA1MTk1NzAtODhmZDZjNDgtN2FhZjJjNWI=, ActorId: [6:7592675915649316874:2345], ActorState: unknown state, LegacyTraceId: 01kecrq5pw2zw0cx7c7x2q39fm, Session actor destroyed trace_id# 2026-01-07T17:40:20.885402Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=6&id=Y2I2MGIwMzEtNDdkY2E3MGItZjljMzZhZWItZjZmMjY1M2E=, ActorId: [6:7592675915649316690:2322], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:40:20.885475Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=Y2I2MGIwMzEtNDdkY2E3MGItZjljMzZhZWItZjZmMjY1M2E=, ActorId: [6:7592675915649316690:2322], 
ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:40:20.885514Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=Y2I2MGIwMzEtNDdkY2E3MGItZjljMzZhZWItZjZmMjY1M2E=, ActorId: [6:7592675915649316690:2322], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:40:20.885543Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=Y2I2MGIwMzEtNDdkY2E3MGItZjljMzZhZWItZjZmMjY1M2E=, ActorId: [6:7592675915649316690:2322], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:40:20.885632Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=Y2I2MGIwMzEtNDdkY2E3MGItZjljMzZhZWItZjZmMjY1M2E=, ActorId: [6:7592675915649316690:2322], ActorState: unknown state, Session actor destroyed trace_id# |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::ConflictingWrites [GOOD] Test command err: 2026-01-07T17:37:12.175341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675110026965942:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:12.175564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:12.400255Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:13.181424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:13.181540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:13.318174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:13.515592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:13.516041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:13.525181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7592675110026965739:2081] 1767807432109865 != 1767807432109868 2026-01-07T17:37:13.531017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:13.723751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cb3/r3tmp/yandexy5OY5i.tmp 2026-01-07T17:37:13.723788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cb3/r3tmp/yandexy5OY5i.tmp 2026-01-07T17:37:13.723937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cb3/r3tmp/yandexy5OY5i.tmp 2026-01-07T17:37:13.724017Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:13.728553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:13.771724Z INFO: TTestServer started on Port 19998 GrpcPort 28201 PQClient connected to localhost:28201 2026-01-07T17:37:14.211857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:14.231677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:37:14.379754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:16.734687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675127206835962:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.734811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.735257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675127206835974:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.735301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675127206835975:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.735417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.739641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:16.754930Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675127206835978:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:17.031593Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675127206836042:2646] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:17.064762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.116723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.177750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675110026965942:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:17.177874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:17.233282Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675131501803347:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:17.233921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.234582Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODg0ODhmY2UtZDZmYjQ2OC00YjE4ZWNmLWU5NjVkZWFi, ActorId: [1:7592675127206835936:2331], ActorState: ExecuteState, LegacyTraceId: 01kecrhhxcawavmyewar7zna4k, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:17.258849Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675131501803630:2824] === CheckClustersList. Ok 2026-01-07T17:37:22.733132Z :CreateTopicWithCustomName INFO: TTopicSdkTestSetup started 2026-01-07T17:37:22.798358Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:22.848814Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675152976640310:2921] connected; active server actors: 1 2026-01-07T17:37:22.849085Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic-1767807442] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-07T17:37:22.867858Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic-1767807442] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:22.868084Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic-1767807442] BALANCER INIT DONE for test-topic-1767807442: (0, 72075186224037892) 2026-01-07T17:37:22.871753Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:22.872694Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:22.872886Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:22.872949Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:22.872994Z node 1 :PERSQUEUE INFO: pq ... os: 45000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:40:20.469287Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] OnAck: seqNo=96, txId=? 2026-01-07T17:40:20.469311Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: acknoledged message 96 2026-01-07T17:40:20.469420Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-07T17:40:20.469502Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 97 written { offset: 96 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 46000000 } max_queue_wait_time { nanos: 46000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:40:20.469524Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] OnAck: seqNo=97, txId=? 2026-01-07T17:40:20.469544Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: acknoledged message 97 2026-01-07T17:40:20.469668Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-07T17:40:20.469751Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 98 written { offset: 97 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 46000000 } max_queue_wait_time { nanos: 46000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:40:20.469790Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] OnAck: seqNo=98, txId=? 
2026-01-07T17:40:20.469813Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: acknoledged message 98 2026-01-07T17:40:20.469941Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-07T17:40:20.470023Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 99 written { offset: 98 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 42000000 } max_queue_wait_time { nanos: 42000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:40:20.470045Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] OnAck: seqNo=99, txId=? 2026-01-07T17:40:20.470065Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: acknoledged message 99 2026-01-07T17:40:20.470166Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-07T17:40:20.470246Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 100 written { offset: 99 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 42000000 } max_queue_wait_time { nanos: 42000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-07T17:40:20.470265Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] OnAck: seqNo=100, txId=? 
2026-01-07T17:40:20.470285Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: acknoledged message 100 2026-01-07T17:40:20.502456Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:20.502509Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.502537Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:20.502575Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.502600Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:20.505688Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session will now close 2026-01-07T17:40:20.505810Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: aborting 2026-01-07T17:40:20.506523Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:20.507253Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:40:20.507306Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T17:40:20.507365Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session is aborting and will not restart 2026-01-07T17:40:20.507346Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 13 sessionId: test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0 grpc read done: success: 0 data: 2026-01-07T17:40:20.507384Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 13 sessionId: test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0 grpc read failed 2026-01-07T17:40:20.508148Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 13 sessionId: test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0 2026-01-07T17:40:20.508204Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 13 sessionId: test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0 is DEAD 2026-01-07T17:40:20.508716Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:20.509511Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7592675908704195405:2664] destroyed 2026-01-07T17:40:20.509581Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:20.509635Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:20.509665Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.509692Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:20.509728Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.509748Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:20.513355Z node 14 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-07T17:40:20.513638Z node 14 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path test-topic 2026-01-07T17:40:20.515229Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [14:7592675917294130054:2682], now have 1 active actors on pipe 2026-01-07T17:40:20.515253Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][test-topic] pipe [14:7592675917294130055:2683] connected; active server actors: 1 2026-01-07T17:40:20.515680Z node 14 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2026-01-07T17:40:20.516215Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7592675917294130054:2682] destroyed 2026-01-07T17:40:20.519220Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d5903efa-c675ffe0-53404993-6fcf4e55_0] PartitionId [0] Generation [11] Write session: destroy 2026-01-07T17:40:20.602584Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:20.602637Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.602660Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:20.602692Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.602710Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:20.703175Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:20.703235Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.703267Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:20.703309Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:20.703337Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:09.163644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:09.312047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:09.351348Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:09.352153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:09.352216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:09.763905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:09.764057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:09.948737Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807606085132 != 1767807606085136 2026-01-07T17:40:09.970109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:10.039652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:10.203692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:10.521710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:10.536020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:10.658943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:10.711223Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-07T17:40:10.711516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.759763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.759893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.761762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:10.761863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:10.761928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:10.762334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.762825Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.762926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-07T17:40:10.763349Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2783] 2026-01-07T17:40:10.763547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.773274Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2787] 2026-01-07T17:40:10.773493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.784513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.784693Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.786140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:10.786213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:10.786263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:10.786563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.786757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.786831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2783] in generation 1 2026-01-07T17:40:10.787191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.787295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.788635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:10.788719Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:10.788793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:10.789132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.789224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.789313Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-07T17:40:10.800577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.830718Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:10.830966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.831136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-07T17:40:10.831180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:10.831219Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:10.831257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:10.831761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.831811Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:10.831890Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.831978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-07T17:40:10.832010Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:10.832034Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:10.832074Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:10.832417Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:10.832571Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:10.832725Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.832761Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:10.832815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.832873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-07T17:40:10.832898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:40:10.832920Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:40:10.832962Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:10.833223Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:10.833268Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.833323Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:10.833373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:10.833598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:912:2784], sessionId# [0:0:0] 2026-01-07T17:40:10.833651Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 
2026-01-07T17:40:10.833731Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:10.834276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:10.834317Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.834346Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:10.834397Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 ... TASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:21.812340Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:21.812451Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:21.813458Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:21.813887Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:21.813946Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:21.813997Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:21.814069Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:21.814130Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:21.814214Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:21.816662Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:21.817049Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:21.817128Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:21.827390Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:21.827512Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:21.827584Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:21.828564Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:21.828751Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:21.833615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:21.841669Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:21.975415Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:21.979091Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:22.027724Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:22.157598Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:22.158029Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:22.158205Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-07T17:40:22.169176Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:22.419034Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] { items { uint64_value: 0 } } 2026-01-07T17:40:22.425750Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1092:2905], serverId# [3:1093:2906], sessionId# [0:0:0] 2026-01-07T17:40:22.426781Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:22.438218Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:22.438298Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:22.438367Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-07T17:40:22.439081Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-01-07T17:40:22.439159Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:22.439317Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:22.439361Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037888 2026-01-07T17:40:22.439641Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:22.439691Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:22.439743Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:22.439831Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:22.439936Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1092:2905], serverId# [3:1093:2906], sessionId# [0:0:0] 2026-01-07T17:40:22.511006Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 
2026-01-07T17:40:22.511161Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2026-01-07T17:40:22.521032Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-07T17:40:22.521317Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:40:22.521568Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:40:22.521652Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:22.521951Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:1113:2879], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:1060:2879]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:1113:2879].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:40:22.522500Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:1107:2879], SessionActorId: [3:1060:2879], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:1060:2879]. 2026-01-07T17:40:22.522850Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YTY0NTRkZmMtZWJmMWQ0LTU1M2VkZDlhLTY0NWU5OWZl, ActorId: [3:1060:2879], ActorState: ExecuteState, LegacyTraceId: 01kecrq798e62bq5spyb39fcf0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:1108:2879] from: [3:1107:2879] trace_id# 2026-01-07T17:40:22.523001Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:1108:2879] TxId: 281474976715662. Ctx: { TraceId: 01kecrq798e62bq5spyb39fcf0, Database: , SessionId: ydb://session/3?node_id=3&id=YTY0NTRkZmMtZWJmMWQ0LTU1M2VkZDlhLTY0NWU5OWZl, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:40:22.523323Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YTY0NTRkZmMtZWJmMWQ0LTU1M2VkZDlhLTY0NWU5OWZl, ActorId: [3:1060:2879], ActorState: ExecuteState, LegacyTraceId: 01kecrq798e62bq5spyb39fcf0, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:40:22.524297Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-07T17:40:22.524371Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-07T17:40:22.524553Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:53.659427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:53.659521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:53.659585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:53.659644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:53.659723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:53.659759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:53.659835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:53.659900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:37:53.660605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:53.660842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:53.765711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:53.765820Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:53.766652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:53.776353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:53.777165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:53.777367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:53.785650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:53.785948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:53.786648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.786901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:53.791003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:53.791171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:53.792177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:53.792250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:53.792408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:53.792459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:53.792507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:53.792652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.799131Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:53.918570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:53.918778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.918938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:53.918971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:53.919206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:53.919261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:53.921443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.921637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:53.921864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.921940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:53.921990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:53.922034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:53.924396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.924439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:53.924468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:53.925910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.925947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.926001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.926046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:53.928864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:53.930175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:53.930328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:53.931078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.931194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:53.931238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.931419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:53.931453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.931558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:53.931626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:53.933066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-07T17:40:19.335883Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:40:19.335919Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:40:19.335952Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:40:19.335981Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-07T17:40:19.336009Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-07T17:40:19.337474Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.337584Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.337627Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:19.337663Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:40:19.337708Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:40:19.339239Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.339337Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.339379Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:19.339417Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:40:19.339457Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:40:19.341246Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-07T17:40:19.341365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.341414Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:19.341449Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-07T17:40:19.341492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:40:19.342878Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.342974Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:19.343011Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:19.343049Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-07T17:40:19.343088Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-07T17:40:19.343167Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-07T17:40:19.345183Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:19.347487Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:19.347616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:19.349338Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:40:19.351035Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-07T17:40:19.351086Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:40:19.352992Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:40:19.353113Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.353157Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2680:4668] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:40:19.354704Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:40:19.354749Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:40:19.354837Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:40:19.354867Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:40:19.354934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:40:19.354966Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:40:19.355029Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:40:19.355058Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-07T17:40:19.355123Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:40:19.355152Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:40:19.357256Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-07T17:40:19.357511Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.357557Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2683:4671] 2026-01-07T17:40:19.358026Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:40:19.358151Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:40:19.358249Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:40:19.358302Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.358337Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter 
[32:2683:4671] 2026-01-07T17:40:19.358467Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.358499Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2683:4671] 2026-01-07T17:40:19.358663Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:40:19.358724Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.358757Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2683:4671] 2026-01-07T17:40:19.358942Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:40:19.358976Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2683:4671] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName >> GroupWriteTest::WriteHardRateDispatcher >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TTxDataShardUploadRows::RetryUploadRowsToShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:11.239589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:11.444006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:11.464894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:11.465331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:11.465379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:11.753292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:11.753439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:11.827131Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807608126472 != 1767807608126476 2026-01-07T17:40:11.839164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:11.886001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:11.987197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:12.301231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:12.315462Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:12.434036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:12.477021Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:12.477308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.521827Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.521942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.523331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:12.523436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:12.523487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:12.523831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.523980Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.524073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:12.535109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.564413Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:12.564650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.565128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:12.565188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:12.565245Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:12.565298Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.565861Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:12.565995Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:12.566092Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.566137Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.566224Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:12.566273Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.566365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:12.566842Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:12.567127Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:12.567240Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:12.569118Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:12.581119Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:12.581228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:12.711503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:12.722054Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:12.722166Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.722650Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.722703Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:12.722780Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:12.723067Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:12.723234Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:12.723422Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.723519Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:12.734122Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:12.734746Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.739429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:12.739505Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.740809Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:12.740917Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.742218Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.742271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:12.742324Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:12.742390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:12.742450Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:12.742537Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.760600Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:12.762524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:12.762685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:12.762753Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:23.723931Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:23.724219Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:23.724390Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:23.725583Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:23.725668Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:23.726170Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:23.726683Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:23.730428Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:23.730489Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:23.731556Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:23.731653Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:23.732760Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:23.732813Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:23.732862Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:23.734255Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:23.734346Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
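
The ABORTED status near the top of this excerpt, with the issues `Transaction locks invalidated. Table: /Root/table-1` and `Operation is aborting because locks are not valid` (issue code 2001), is the optimistic-lock abort a client is expected to handle by re-running the whole transaction. Below is a minimal, framework-free sketch of such a retry loop; RunWithLockRetries and the runTransaction callback are hypothetical stand-ins for the real client code, not YDB SDK calls.

#include <chrono>
#include <functional>
#include <random>
#include <thread>

// Outcome of one attempt; kLocksInvalidated mirrors issue code 2001 from the log.
enum class TxOutcome { kCommitted, kLocksInvalidated, kOtherError };

// Re-runs the whole transaction body when the commit is rejected because its
// locks were invalidated, with jittered exponential backoff between attempts.
// runTransaction is a hypothetical callback: it must perform the reads, the
// writes and the commit as one unit so that a retry starts from fresh reads.
TxOutcome RunWithLockRetries(const std::function<TxOutcome()>& runTransaction,
                             int maxAttempts = 5) {
    std::mt19937 rng{std::random_device{}()};
    auto delay = std::chrono::milliseconds(20);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        const TxOutcome outcome = runTransaction();
        if (outcome != TxOutcome::kLocksInvalidated) {
            return outcome;  // committed, or an error a retry will not fix
        }
        if (attempt == maxAttempts) {
            break;
        }
        std::uniform_int_distribution<long> jitter(0, delay.count());
        std::this_thread::sleep_for(delay + std::chrono::milliseconds(jitter(rng)));
        delay *= 2;
    }
    return TxOutcome::kLocksInvalidated;
}

Backing off before retrying matters because the aborted attempt's reads have to be repeated under fresh locks; retrying immediately against the same contended keys tends to hit the same invalidation again.
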
2026-01-07T17:40:23.734439Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:23.735769Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:23.737914Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:23.738206Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:23.738281Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:23.748768Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:23.748877Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:23.749688Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:23.750445Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:23.751062Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:23.755526Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:23.767059Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:23.889523Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:23.892478Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:23.927754Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:24.108220Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:24.108763Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:24.109009Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:40:24.120188Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:24.125067Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:24.126447Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:24.138542Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:24.138628Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:24.139003Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:24.139055Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:24.139171Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:24.139214Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:24.139289Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:24.139346Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:24.139649Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:24.140849Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:24.141273Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:24.141494Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:24.141548Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:24.141599Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
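
The `TTxDirectBase(48) Execute/Complete` and `Conditional erase complete` entries just above are the datashard running the conditional erase that EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp exercises: rows whose pg-timestamp column has passed a threshold are scanned out and erased. The snippet below is only a sketch of the selection rule over hypothetical in-memory rows; the real shard streams matching keys through a scan and erases them in a follow-up write, as the surrounding entries show.

#include <cstdint>
#include <vector>

// A row of a hypothetical table whose expiry column is a pg timestamp,
// represented here as microseconds since the Unix epoch.
struct Row {
    uint64_t key;
    int64_t expireAtMicros;
};

// Returns the keys whose expiry column is at or before the threshold, i.e. the
// rows a conditional erase with that threshold would delete.
std::vector<uint64_t> SelectExpiredKeys(const std::vector<Row>& rows,
                                        int64_t thresholdMicros) {
    std::vector<uint64_t> expired;
    for (const Row& row : rows) {
        if (row.expireAtMicros <= thresholdMicros) {
            expired.push_back(row.key);
        }
    }
    return expired;
}
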
2026-01-07T17:40:24.141857Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:24.141947Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:24.142645Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:24.142991Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:24.143124Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:24.143183Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:24.209361Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:24.209431Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:24.209611Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:24.209654Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:24.209696Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:24.209840Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:24.209906Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:24.209956Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:08.996821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:09.139458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:09.200402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:09.204483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:09.204604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:09.702827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:09.702979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:09.862164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807606105802 != 1767807606105806 2026-01-07T17:40:09.894967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:09.958376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:10.071816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:10.422668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:10.436982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:10.544196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:10.602040Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-07T17:40:10.602307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.655585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.655740Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.657636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:10.657733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:10.657805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:10.658213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.658745Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.658827Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-07T17:40:10.659245Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2783] 2026-01-07T17:40:10.659473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.669608Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2787] 2026-01-07T17:40:10.669862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:10.680050Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.680236Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.681726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:10.681793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:10.681851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:10.682163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.682348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.682427Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2783] in generation 1 2026-01-07T17:40:10.682833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:10.682920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:10.684260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:10.684355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:10.684418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:10.684761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:10.684860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:10.684964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-07T17:40:10.697739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.746060Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:10.746270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.746412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-07T17:40:10.746452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:10.746489Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:10.746529Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:10.746978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.747025Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:10.747127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.747188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-07T17:40:10.747212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:10.747234Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:10.747257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:10.747559Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:10.747696Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:10.747849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:10.747901Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:10.747961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:10.748016Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-07T17:40:10.748038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:40:10.748059Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:40:10.748082Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:10.748317Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:10.748385Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.748426Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:10.748476Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:10.748717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:912:2784], sessionId# [0:0:0] 2026-01-07T17:40:10.748771Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 
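
The repeated `Switched to work state WaitScheme`, `not sending time cast registration request in state WaitScheme: missing processing params` and `Cannot activate change sender ... state: WaitScheme` entries above show each freshly started shard deferring its side effects until the schema transaction lands. Below is a toy sketch of that gating; every name is hypothetical and unrelated to the actual datashard classes.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

enum class ShardState { WaitScheme, Ready };

// Queues side effects while the shard has no schema yet and replays them once
// the schema transaction is applied (the "Switched to work state Ready" point).
class MiniShard {
public:
    void Defer(std::string action) {
        if (state_ == ShardState::Ready) {
            std::cout << "run: " << action << "\n";
        } else {
            pending_.push_back(std::move(action));
        }
    }

    void ApplySchema() {
        state_ = ShardState::Ready;
        for (const std::string& action : pending_) {
            std::cout << "run: " << action << "\n";
        }
        pending_.clear();
    }

private:
    ShardState state_ = ShardState::WaitScheme;
    std::vector<std::string> pending_;
};

int main() {
    MiniShard shard;
    shard.Defer("register with mediator time cast");  // refused while in WaitScheme above
    shard.Defer("activate change sender");            // likewise deferred
    shard.ApplySchema();                              // both actions run only now
    return 0;
}
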
2026-01-07T17:40:10.748831Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:10.749340Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:10.749377Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:10.749404Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:10.749445Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:40:24.233616Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2026-01-07T17:40:24.233878Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:40:24.233906Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2026-01-07T17:40:24.277603Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:40:24.277673Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-01-07T17:40:24.278278Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:256: 72075186224037889 snapshot complete for split OpId 281474976715663 2026-01-07T17:40:24.278571Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2026-01-07T17:40:24.278646Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2026-01-07T17:40:24.278675Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2026-01-07T17:40:24.278702Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2026-01-07T17:40:24.278913Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2026-01-07T17:40:24.279114Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2026-01-07T17:40:24.279145Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2026-01-07T17:40:24.279171Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 
BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2026-01-07T17:40:24.279197Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2026-01-07T17:40:24.279280Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2026-01-07T17:40:24.279975Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:424: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2026-01-07T17:40:24.280246Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2026-01-07T17:40:24.280393Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2026-01-07T17:40:24.280752Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [3:1383:3097], serverId# [3:1384:3098], sessionId# [0:0:0] 2026-01-07T17:40:24.280801Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037892, clientId# [3:1382:3096], serverId# [3:1385:3099], sessionId# [0:0:0] 2026-01-07T17:40:24.280974Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-01-07T17:40:24.281870Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-01-07T17:40:24.284020Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976715663 2026-01-07T17:40:24.284245Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-07T17:40:24.284373Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:24.284488Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:40:24.284575Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [3:1388:3102] 2026-01-07T17:40:24.284621Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-07T17:40:24.284678Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-07T17:40:24.284720Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:40:24.284890Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2026-01-07T17:40:24.285766Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-07T17:40:24.285821Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:24.285856Z 
node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-07T17:40:24.285891Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-07T17:40:24.286011Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1383:3097], serverId# [3:1384:3098], sessionId# [0:0:0] 2026-01-07T17:40:24.286485Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2026-01-07T17:40:24.286536Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:40:24.286608Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976715663 2026-01-07T17:40:24.286730Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-01-07T17:40:24.286820Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:24.286890Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-07T17:40:24.286942Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037892, actorId: [3:1390:3104] 2026-01-07T17:40:24.286967Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037892 2026-01-07T17:40:24.287001Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2026-01-07T17:40:24.287028Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-07T17:40:24.287256Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2026-01-07T17:40:24.288015Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:24.288063Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:24.288097Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-07T17:40:24.288131Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T17:40:24.288207Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1382:3096], serverId# [3:1385:3099], sessionId# [0:0:0] 2026-01-07T17:40:24.288365Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-07T17:40:24.288434Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-07T17:40:24.288530Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-01-07T17:40:24.288565Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 72075186224037892 2026-01-07T17:40:24.289153Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-07T17:40:24.289204Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-07T17:40:24.310688Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715663 2026-01-07T17:40:24.314669Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-01-07T17:40:24.316171Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-07T17:40:24.316220Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-07T17:40:24.316538Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:24.316575Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037889 state 5 2026-01-07T17:40:24.316677Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1276:3017], serverId# [3:1277:3018], sessionId# [0:0:0] 2026-01-07T17:40:24.316800Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2026-01-07T17:40:24.316851Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:40:24.316891Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> TTxDataShardUploadRows::TestUploadRows >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin >> GroupWriteTest::TwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:40:11.676987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:11.786180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:11.804310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:11.804827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:11.804899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:12.071317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:12.071445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:12.141495Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807608879782 != 1767807608879786 2026-01-07T17:40:12.155765Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:12.202002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:12.299277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:12.636638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:12.657441Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:12.770195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:12.820391Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-07T17:40:12.820659Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.870845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.871001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.872624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:12.872710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:12.872774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:12.873176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.873636Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.873722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-07T17:40:12.874133Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2783] 2026-01-07T17:40:12.874338Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.883532Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2787] 2026-01-07T17:40:12.883760Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.892847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.893049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.894371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:12.894433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:12.894490Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:12.894781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.894961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.895054Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2783] in generation 1 2026-01-07T17:40:12.895445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.895531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.896849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:12.896943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:12.897031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:12.897308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.897396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.897457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-07T17:40:12.908761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.940496Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:12.940691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.940819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-07T17:40:12.940856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:12.940892Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:12.940978Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.941374Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.941418Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:12.941508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.941565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-07T17:40:12.941588Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:12.941608Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:12.941630Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:12.941893Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:12.942010Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:12.942162Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.942208Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:12.942269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.942375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-07T17:40:12.942402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:40:12.942422Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:40:12.942441Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:12.942652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.942692Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.942732Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:12.942775Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.942987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:912:2784], sessionId# [0:0:0] 2026-01-07T17:40:12.943038Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 
2026-01-07T17:40:12.943091Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:12.943462Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:12.943494Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.943517Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:12.943550Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 ... d__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:40:26.360174Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:26.360313Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-07T17:40:26.360374Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-07T17:40:26.360439Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1281:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2026-01-07T17:40:26.360562Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:40:26.360644Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1281:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2026-01-07T17:40:26.360716Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-07T17:40:26.360745Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-07T17:40:26.360801Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1281:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2026-01-07T17:40:26.360836Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:904: [DistEraser] [3:1281:3021] Register plan: txId# 281474976715662, minStep# 2502, maxStep# 32502 2026-01-07T17:40:26.360908Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-01-07T17:40:26.360958Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-01-07T17:40:26.373749Z node 3 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-01-07T17:40:26.373911Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:40:26.376790Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 
2026-01-07T17:40:26.376857Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2026-01-07T17:40:26.376971Z node 3 :TX_DATASHARD ERROR: datashard_distributed_erase.cpp:167: [DistEraser] [3:1281:3021] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2026-01-07T17:40:26.377626Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-07T17:40:26.377673Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-07T17:40:26.377831Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1276:3017], serverId# [3:1277:3018], sessionId# [0:0:0] 2026-01-07T17:40:26.378097Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:26.378142Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:26.378185Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-07T17:40:26.378236Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:26.398177Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1293:3032] 2026-01-07T17:40:26.398419Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:26.404231Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:26.405307Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:26.407525Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:26.407610Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:26.407675Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:26.408133Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:26.408519Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:26.408601Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:1308:3032] in generation 2 2026-01-07T17:40:26.420018Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:26.420131Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-01-07T17:40:26.420259Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:26.420589Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:1310:3040] 2026-01-07T17:40:26.420631Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:26.420676Z node 3 :TX_DATASHARD INFO: 
datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:26.420713Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:26.420989Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-07T17:40:26.421227Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-07T17:40:26.421954Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:26.422074Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:26.422451Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:26.422592Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2501 2026-01-07T17:40:26.422637Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:26.422720Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:26.422766Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:26.422805Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 1 2026-01-07T17:40:26.422846Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:26.423235Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-07T17:40:26.423281Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-01-07T17:40:26.423326Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-01-07T17:40:26.423504Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-07T17:40:26.423532Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-01-07T17:40:26.423556Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-01-07T17:40:26.423610Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2026-01-07T17:40:26.423682Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:40:26.423731Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037889 2026-01-07T17:40:26.423775Z node 3 
:TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:40:26.423835Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T17:40:26.423961Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 2500 next step 2501 2026-01-07T17:40:26.424030Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2026-01-07T17:40:26.424085Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2026-01-07T17:40:26.424133Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-07T17:40:26.424165Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037890 2026-01-07T17:40:26.424194Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-07T17:40:26.424244Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037890 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-07T17:40:26.424314Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:38:37.663446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:38:37.663580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:37.663631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-01-07T17:38:37.663670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:38:37.663726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:38:37.663770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:38:37.663838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:38:37.663938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:38:37.664891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:38:37.665320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:38:37.763872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:38:37.763948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:37.783106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:38:37.783552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:38:37.783775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:38:37.797176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:38:37.797639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:38:37.798559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:38:37.798887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:38:37.802750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:37.802996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:38:37.804403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:38:37.804498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:38:37.804671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:38:37.804733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:38:37.804845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:38:37.805093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:38:37.973762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.974827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.974990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.975913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:38:37.976015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... cipient [3:716:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:24.853391Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T17:40:24.853485Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000038 last cleanup 0 2026-01-07T17:40:24.853551Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:24.853590Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2026-01-07T17:40:24.853624Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2026-01-07T17:40:24.853675Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-07T17:40:24.853821Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:716:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:40:24.854129Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-07T17:40:24.855141Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:716:2701], Recipient [3:129:2153]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 235 Memory: 124368 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 132 TableOwnerId: 72057594046678944 FollowerId 2026-01-07T17:40:24.855195Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:40:24.855245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0235 2026-01-07T17:40:24.855370Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:40:24.855414Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:40:24.856698Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435080, Sender [3:1466:3407], Recipient [3:716:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2026-01-07T17:40:24.905377Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:40:24.905454Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:40:24.905485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-07T17:40:24.905552Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-07T17:40:24.905584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-07T17:40:24.905661Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-07T17:40:24.905721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-07T17:40:24.905750Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-07T17:40:24.905818Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2026-01-07T17:40:24.905875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-07T17:40:24.905955Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:40:24.916520Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:40:24.916601Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 
2026-01-07T17:40:24.916631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:40:25.193653Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.193730Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.193824Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:25.193863Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:25.573133Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.573224Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.573321Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:25.573361Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:25.965419Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.965511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:25.965621Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:25.965657Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:26.331728Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:26.331811Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:26.331896Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:26.331929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:26.684676Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], 
Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:26.684759Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:26.684854Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:26.684889Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:26.748115Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:716:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:40:27.050769Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:27.050835Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:40:27.050897Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:40:27.050924Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |88.0%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |88.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:16.282057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:16.395091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:16.413825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:16.414343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:16.414400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:16.703833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:16.703983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:16.774334Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807613437442 != 1767807613437446 2026-01-07T17:40:16.785505Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:16.833400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:16.925030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:17.261931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:17.275777Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:17.386200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:17.427695Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:17.427944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:17.480142Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:17.480272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:17.482029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:17.482132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:17.482212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:17.482602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:17.482742Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:17.482829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:17.494163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:17.530523Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:17.530752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:17.530889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:17.530925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:17.530961Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:17.531007Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:17.531571Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:17.531675Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:17.531765Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:17.531802Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:17.531855Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:17.531899Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:17.531977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:17.532486Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:17.532773Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:17.532885Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:17.534631Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:17.545728Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:17.545850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:17.671475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:17.677062Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:17.677184Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:17.677667Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:17.677731Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:17.677815Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:17.678118Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:17.678291Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:17.678493Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:17.678596Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:17.681105Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:17.681599Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:17.683985Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:17.684040Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:17.685184Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:17.685267Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:17.686382Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:17.686492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:17.686538Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:17.686602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:17.686653Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:17.686736Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:17.691918Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:17.693616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:17.693783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:17.693842Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:27.900884Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:27.901425Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:27.901584Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:27.902457Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:27.902531Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:27.903098Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:27.903577Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:27.905952Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:27.906009Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:27.907168Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:27.907271Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:27.908459Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:27.908504Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:27.908541Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:27.908593Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:27.908632Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-07T17:40:27.908700Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:27.909822Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:27.912055Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:27.912313Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:27.912367Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:27.920795Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:27.920899Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:27.921584Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:27.922378Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:27.922763Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:27.926613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:27.933400Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:28.055882Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:28.059590Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:28.098862Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:28.221613Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:28.222008Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:28.222172Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:40:28.233689Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:28.238016Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:28.239293Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-07T17:40:28.252174Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-07T17:40:28.252296Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:28.252564Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:28.252620Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-07T17:40:28.252742Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:28.252795Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:28.252846Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:28.252984Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:28.253304Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-07T17:40:28.254391Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:28.254830Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:28.255047Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:28.255095Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:28.255144Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-07T17:40:28.255443Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:28.255520Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:28.256253Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-07T17:40:28.256609Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:28.256771Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-07T17:40:28.256842Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-07T17:40:28.318306Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:28.318381Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-07T17:40:28.318572Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:28.318618Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:28.318662Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:28.318811Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:28.318878Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:28.318932Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin >> KikimrIcGateway::TestDropExternalTable >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [GOOD] >> TxUsage::ReadRuleGeneration [GOOD] >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2026-01-07T17:39:06.779161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:06.779217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:06.779678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:06.791424Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:06.791813Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:06.792087Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:06.802610Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:06.839962Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:06.840574Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:06.842307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:06.842374Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:06.842438Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:06.842741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:06.842913Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:06.842975Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:06.911504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:06.947397Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:06.947641Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:06.947755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:06.947808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:06.947842Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:06.947876Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:06.948034Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:06.948141Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:06.948465Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:06.948584Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:06.948727Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:06.948771Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:06.948830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:06.948865Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:06.948975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:06.949025Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:06.949072Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:06.949177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:06.949213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:06.949277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:06.955800Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:06.955869Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:06.955979Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:06.956119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:06.956175Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:06.956218Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:06.956262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:06.956289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan 
for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:06.956324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:06.956348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:06.956583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:06.956614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:06.956643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:06.956664Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:06.956728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:06.956757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:06.956801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:06.956832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:06.956853Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:06.969101Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:06.969172Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:06.969205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:06.969265Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:06.969328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:06.969846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:06.969902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:06.969962Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:06.970121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:06.970153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:06.970303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:06.970340Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:06.970375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:06.970406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:06.974010Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:06.974083Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:06.974302Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:06.974342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:06.974422Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:06.974459Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:06.974499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:06.974539Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:06.974574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
2:348:2316]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-07T17:40:29.018308Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.018339Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-07T17:40:29.018440Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-07T17:40:29.018476Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.018512Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-07T17:40:29.018613Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-07T17:40:29.018647Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.018677Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-07T17:40:29.018785Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-07T17:40:29.018826Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.018859Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-07T17:40:29.018966Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-07T17:40:29.018999Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019029Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-07T17:40:29.019125Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-07T17:40:29.019155Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019186Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-07T17:40:29.019268Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], 
Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-07T17:40:29.019294Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019326Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-07T17:40:29.019418Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-07T17:40:29.019451Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019484Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-07T17:40:29.019574Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-07T17:40:29.019605Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019638Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-07T17:40:29.019821Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-07T17:40:29.019870Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.019904Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-07T17:40:29.020013Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-07T17:40:29.020048Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020077Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-07T17:40:29.020179Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-07T17:40:29.020229Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020261Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-07T17:40:29.020366Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-07T17:40:29.020404Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020438Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-07T17:40:29.020527Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-07T17:40:29.020560Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020590Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-07T17:40:29.020677Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-07T17:40:29.020708Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020739Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-07T17:40:29.020826Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-07T17:40:29.020859Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.020886Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-07T17:40:29.021046Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-07T17:40:29.021096Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.021130Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-07T17:40:29.021243Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-07T17:40:29.021277Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.021309Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-07T17:40:29.021404Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-07T17:40:29.021443Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.021475Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-07T17:40:29.021579Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:40:29.021620Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:29.021657Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 30 31 27 30 31 29 21 31 30 30 28 30 16 28 30 24 11 23 30 18 27 27 30 31 19 17 - 31 31 - - - actual 30 31 27 30 31 29 21 31 30 30 28 30 16 28 30 24 11 23 30 18 27 27 30 31 19 17 - 31 31 - - - interm 4 1 3 6 4 2 - 6 - 6 6 - 6 3 5 4 6 4 4 5 4 - - 5 1 1 - - - - - - >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] >> TxUsage::Write_And_Read_Small_Messages_1 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets >> TxUsage::Write_And_Read_Small_Messages_2 >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [GOOD] >> KikimrIcGateway::TestCreateExternalTable >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query >> KikimrIcGateway::TestDropExternalTable [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> KikimrIcGateway::TestDropExternalDataSource >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query >> TLocksTest::Range_CorrectDot [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError >> TxUsage::WriteToTopic_Demo_40_Table [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> BasicUsage::RecreateObserver >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system >> TTxDataShardUploadRows::TestUploadShadowRows >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:11.011419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:11.132187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:11.150758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:11.151353Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:11.151439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:11.446038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:11.446189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:11.559387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807607916068 != 1767807607916072 2026-01-07T17:40:11.570550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:11.622100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:11.721141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:12.046479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:12.061503Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:12.176462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:12.241558Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-07T17:40:12.241787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.292866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.293048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.294848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:12.294956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:12.295015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:12.295413Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.295931Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.296014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-07T17:40:12.296531Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2783] 2026-01-07T17:40:12.296736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.308218Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2787] 2026-01-07T17:40:12.308449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:12.323952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.324165Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.325655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:12.325738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:12.325785Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:12.326127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.326368Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.326438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2783] in generation 1 2026-01-07T17:40:12.326858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:12.326942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:12.328455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:12.328593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:12.328656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:12.328979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:12.329108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:12.329197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-07T17:40:12.340372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.396670Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:12.396907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.397081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-07T17:40:12.397124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:12.397165Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:12.397207Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:12.397663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.397706Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:12.397797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.397859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-07T17:40:12.397881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:12.397902Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:12.397924Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:12.398208Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:12.398340Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:12.398502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:12.398543Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:12.398603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:12.398659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-07T17:40:12.398681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:40:12.398701Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:40:12.398721Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:12.398958Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:12.398999Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.399041Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:12.399087Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:12.399314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:912:2784], sessionId# [0:0:0] 2026-01-07T17:40:12.399363Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 
2026-01-07T17:40:12.399431Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:12.399949Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:12.399984Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:12.400017Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:12.400054Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 ... 037888 2026-01-07T17:40:32.080935Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2026-01-07T17:40:32.081059Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:32.081131Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1282:3022], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:40:32.081187Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 3000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2026-01-07T17:40:32.081212Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:32.081329Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1282:3022] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2026-01-07T17:40:32.081366Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2026-01-07T17:40:32.081390Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1282:3022] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2026-01-07T17:40:32.081424Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1282:3022] Reply: txId# 281474976715662, status# OK, error# 2026-01-07T17:40:32.081670Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-07T17:40:32.081713Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-07T17:40:32.081957Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:32.081986Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:32.082016Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:32.082065Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:32.082128Z node 3 :TX_DATASHARD DEBUG: 
datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1276:3017], serverId# [3:1277:3018], sessionId# [0:0:0] 2026-01-07T17:40:32.083101Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:40:32.083394Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:40:32.083574Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:32.083612Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.083647Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2026-01-07T17:40:32.083845Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.083896Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:32.084342Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2026-01-07T17:40:32.084527Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:32.084712Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 0 2026-01-07T17:40:32.084753Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2026-01-07T17:40:32.101027Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-07T17:40:32.101105Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037889 2026-01-07T17:40:32.101313Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:32.101355Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.101398Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2026-01-07T17:40:32.101550Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:32.101620Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:32.101675Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:32.105022Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:32.105502Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:32.105741Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:32.105792Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.105845Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2026-01-07T17:40:32.106108Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.106187Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:32.106881Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2026-01-07T17:40:32.107166Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:32.107368Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2026-01-07T17:40:32.107435Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2026-01-07T17:40:32.145830Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:32.145912Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715665, at: 72075186224037888 2026-01-07T17:40:32.146141Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:32.146181Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.146221Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:32.146359Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:32.146433Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:32.146487Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:32.153189Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-07T17:40:32.153561Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-07T17:40:32.153753Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:32.153798Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.153840Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2026-01-07T17:40:32.154044Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry 
run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.154129Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:32.154669Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2026-01-07T17:40:32.154859Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:32.155019Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2026-01-07T17:40:32.155061Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2026-01-07T17:40:32.214111Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-07T17:40:32.214198Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715666, at: 72075186224037890 2026-01-07T17:40:32.214402Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:32.214444Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:32.214486Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2026-01-07T17:40:32.214625Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:32.214698Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:32.214748Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2026-01-07T17:39:47.560382Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675773187746999:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.572313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.616713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:47.943421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:47.943529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:48.018077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.074480Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-07T17:39:48.075594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:48.185972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.458386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.505872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:39:48.587066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.659182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:48.913524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:48.984051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:52.191905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:52.217038Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675793618998016:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.217211Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:52.269084Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:52.506511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:52.506587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:52.522885Z 
node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:52.553037Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675793618997783:2081] 1767807592130415 != 1767807592130418 2026-01-07T17:39:52.580561Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:52.589995Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:52.943127Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:53.111208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:53.141469Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:53.152638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:53.213517Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:53.265450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:53.326602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:57.793474Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675818965681866:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:57.800387Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:57.822377Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:39:57.927968Z node 3 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:57.937898Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:57.937978Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:57.970141Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:58.098278Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:58.202260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:58.208427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:39:58.218408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:58.225004Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.314204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:58.399671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:02.773723Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592675838930429614:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:02.773919Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:02.787812Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:02.900852Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:02.933008Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscr ... ONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592675900513220140:2081] 1767807616256494 != 1767807616256497 2026-01-07T17:40:16.608662Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:16.785793Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:16.795303Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:40:16.812361Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:16.903414Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:16.985099Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:21.007323Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592675919452728469:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:21.007380Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:21.038445Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:40:21.144315Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:21.147515Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592675915157761136:2081] 1767807621003692 != 1767807621003695 2026-01-07T17:40:21.157508Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:21.157590Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:21.190322Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:40:21.314025Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:21.469625Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:21.496547Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:21.579132Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:21.649307Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:25.839662Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592675938546325788:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:25.839756Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:25.861539Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:40:25.962382Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:25.963447Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592675938546325749:2081] 1767807625838068 != 1767807625838071 2026-01-07T17:40:25.984034Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:25.984116Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:26.009391Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:26.045908Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:26.241649Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:26.264429Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:26.336508Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:26.394662Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:30.459561Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592675957280060861:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:30.459821Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:30.521022Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:40:30.622641Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:30.623780Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592675957280060823:2081] 1767807630452126 != 1767807630452129 2026-01-07T17:40:30.648410Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:30.648535Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:30.679075Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:30.783876Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:40:30.941814Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:30.955229Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:40:30.962246Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:31.040617Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:31.108232Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestDropResourcePool >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2026-01-07T17:37:07.567471Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675087148981300:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:07.567578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:07.603875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:07.634545Z node 1 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:07.949023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:07.965504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:07.965605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:08.121106Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675087148981056:2081] 1767807427478601 != 1767807427478604 2026-01-07T17:37:08.142756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:08.150030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:08.205019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:08.437434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cc7/r3tmp/yandexlyRRk6.tmp 2026-01-07T17:37:08.437461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cc7/r3tmp/yandexlyRRk6.tmp 2026-01-07T17:37:08.437652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cc7/r3tmp/yandexlyRRk6.tmp 2026-01-07T17:37:08.437745Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:08.509280Z INFO: TTestServer started on Port 19698 GrpcPort 29138 2026-01-07T17:37:08.570380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:29138 2026-01-07T17:37:08.832672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:08.851117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:37:08.865642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:37:08.946817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:09.161220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:37:09.185860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T17:37:12.573085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675087148981300:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:12.573211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:12.855904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675108623818563:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.856103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.859965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675108623818600:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.860045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.860138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675108623818599:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.877416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675108623818632:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.877512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.877792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675108623818634:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.877842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:12.878545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:12.914616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675108623818603:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:12.971180Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675108623818662:2651] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:13.574509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:13.576078Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675108623818671:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:13.580586Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTIxMDcwODItN2U3N2Y1MjQtNWEyZDE1YTYtNGNhMGJkNTk=, ActorId: [1:7592675108623818558:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrhe1v06gfy9ec7b00d9wd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:13.583382Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:13.620774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:13.731121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ... 
dingWrites: 0 2026-01-07T17:40:31.271156Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.271192Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.271220Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.371363Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.371425Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.371449Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.371494Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.371522Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.472293Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.472341Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.472365Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.472397Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.472425Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.574579Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.574628Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.574653Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.574685Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.574713Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.684162Z :INFO: [/Root] [/Root] [2584ef3e-8d9a679-209c3982-52038062] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:31.684259Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2026-01-07T17:40:31.684335Z :INFO: [/Root] [/Root] [2584ef3e-8d9a679-209c3982-52038062] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2001 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:31.684485Z :NOTICE: [/Root] [/Root] [2584ef3e-8d9a679-209c3982-52038062] Aborting read session. 
Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:40:31.684548Z :DEBUG: [/Root] [/Root] [2584ef3e-8d9a679-209c3982-52038062] [] Abort session to cluster 2026-01-07T17:40:31.685254Z :DEBUG: [/Root] 0x00007D83DDE2D590 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_4537843732074183471_v1 Close 2026-01-07T17:40:31.685754Z :DEBUG: [/Root] 0x00007D83DDE2D590 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_4537843732074183471_v1 Close 2026-01-07T17:40:31.685925Z :NOTICE: [/Root] [/Root] [2584ef3e-8d9a679-209c3982-52038062] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:31.685036Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.685080Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.685098Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.685123Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.685145Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.693931Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-07T17:40:31.694008Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0] PartitionId [0] Generation [1] Write session will now close 2026-01-07T17:40:31.693058Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:31.693101Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 grpc read failed 2026-01-07T17:40:31.693132Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 grpc closed 2026-01-07T17:40:31.693166Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 is DEAD 2026-01-07T17:40:31.694694Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7592675953822317620:2519]: session cookie 4 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:31.694725Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7592675953822317620:2519]: session cookie 4 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1grpc read failed 2026-01-07T17:40:31.694766Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7592675953822317620:2519]: session cookie 4 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 grpc closed 2026-01-07T17:40:31.694796Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7592675953822317620:2519]: session cookie 4 consumer consumer-1 session consumer-1_14_3_4537843732074183471_v1 proxy is DEAD 2026-01-07T17:40:31.701037Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-07T17:40:31.701832Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:31.701916Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-07T17:40:31.701926Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: 
read_balancer__balancing.cpp:1661: [72075186224037893][test-topic] pipe [14:7592675953822317611:2513] disconnected. 2026-01-07T17:40:31.701990Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][test-topic] pipe [14:7592675953822317611:2513] disconnected; active server actors: 1 2026-01-07T17:40:31.702029Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][test-topic] pipe [14:7592675953822317611:2513] client consumer-1 disconnected session consumer-1_14_3_4537843732074183471_v1 2026-01-07T17:40:31.702183Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session consumer-1_14_3_4537843732074183471_v1 2026-01-07T17:40:31.702233Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7592675953822317614:2517] destroyed 2026-01-07T17:40:31.702336Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer-1_14_3_4537843732074183471_v1 2026-01-07T17:40:31.712654Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0 grpc closed 2026-01-07T17:40:31.712713Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|e27e0359-44d7d5da-9542b12b-69aac4a7_0 is DEAD 2026-01-07T17:40:31.713980Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:31.721097Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7592675945232382883:2478] destroyed 2026-01-07T17:40:31.721191Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:31.721247Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.721279Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.721307Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.721343Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.721366Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.785269Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.785325Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.785350Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.785385Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.785412Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:31.885689Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:31.885741Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.885764Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:31.885797Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:31.885825Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TxUsage::WriteToTopic_Demo_40_Query |88.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> GroupWriteTest::TwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:30.071713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:30.183418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:30.201728Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:30.202252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:30.202311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:30.486236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:30.486360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:30.565690Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807627433850 != 1767807627433854 2026-01-07T17:40:30.577030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:30.625569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:30.714531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:31.023811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:31.038547Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:31.143912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:31.176227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:40:31.177322Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:40:31.177583Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:31.177781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:31.212120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:40:31.212758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:31.212897Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:31.214862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:31.214955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:31.215032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:31.215417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:31.215546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:31.215636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:31.226468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:31.261589Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:31.261808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:31.261958Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:31.262009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:31.262046Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:31.262084Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:31.262314Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:31.262365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:31.262765Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:31.262867Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:31.262953Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:31.262996Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:31.263043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:40:31.263092Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:40:31.263149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:40:31.263184Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:31.263231Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:40:31.263356Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:31.263409Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:31.263475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:31.263878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:40:31.263954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:40:31.264063Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:31.264346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:40:31.264406Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:31.264497Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:31.264540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:40:31.264634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:40:31.264681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:40:31.264732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:40:31.265235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:40:31.265286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:40:31.265324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:40:31.265359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:40:31.265416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:40:31.265451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:40:31.265502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:40:31.265539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:40:31.265565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:40:31.267205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:40:31.267274Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:31.279207Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:36.527985Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:36.528194Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:36.753081Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:36.753205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:36.780607Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807633712783 != 1767807633712787 2026-01-07T17:40:36.784242Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:36.828954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:36.908572Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:37.174887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:37.188180Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:37.289650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:37.311074Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-07T17:40:37.311319Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:37.352737Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:37.352879Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:37.354687Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:37.354820Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:37.354883Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:37.355282Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:37.355438Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:37.355535Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-07T17:40:37.366352Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:37.366441Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:37.366560Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:37.366633Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-07T17:40:37.366677Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:37.366709Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:37.366741Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:37.367063Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:37.367146Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:37.367482Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:37.367521Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.367561Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:37.367600Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:37.367650Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-07T17:40:37.367752Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:37.367945Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:37.368009Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:37.369615Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:37.380435Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:37.380549Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:40:37.499124Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# 
[2:919:2785], sessionId# [0:0:0] 2026-01-07T17:40:37.499745Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:40:37.499800Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:37.500621Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:37.500685Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:37.500733Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:37.501059Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:37.501230Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:37.502048Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:37.502116Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:37.502572Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:37.503012Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.504713Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:37.504767Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:37.505311Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:37.505388Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:37.506441Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:37.506485Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:37.506530Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:37.506595Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:37.506644Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:37.506724Z node 2 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:37.507720Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:37.510043Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:37.510125Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:37.510880Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:37.515763Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-07T17:40:37.515866Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> GroupWriteTest::SimpleRdma [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] >> KikimrIcGateway::TestListPath Test command err: RandomSeed# 13330204144230884665 2026-01-07T17:40:29.408035Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:29.408141Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:29.436238Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:29.436321Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:29.436436Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: 
TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:29.436462Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:29.440754Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:29.440879Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:29.461192Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:29.461337Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:29.465605Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:40:29.465707Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:40:38.844351Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:40:38.844445Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:38.844504Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:38.879963Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2026-01-07T17:40:38.880060Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:40:15.464165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:15.569633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:15.582159Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:15.582509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:15.582551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:15.834663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:15.834784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:15.906915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807612695368 != 1767807612695372 2026-01-07T17:40:15.918155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:15.964577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:16.071035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:16.375278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:16.388224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:16.504641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:16.561910Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-07T17:40:16.562204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:16.610626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:16.610751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:16.612367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:16.612464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:16.612531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:16.612905Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:16.613385Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:16.613475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-07T17:40:16.613841Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2783] 2026-01-07T17:40:16.614049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:16.622919Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2787] 2026-01-07T17:40:16.623125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:16.631490Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:16.631656Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:16.632984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:16.633048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:16.633100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:16.633383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:16.633572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:16.633633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2783] in generation 1 2026-01-07T17:40:16.634005Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:16.634101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:16.635280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:16.635342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:16.635397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:16.635647Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:16.635758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:16.635807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-07T17:40:16.647082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:16.674169Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:16.674345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:16.674462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-07T17:40:16.674499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:16.674533Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:16.674564Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:16.674944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:16.674984Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:16.675048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:16.675120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-07T17:40:16.675142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:16.675164Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:16.675185Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:16.675423Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:16.675520Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:16.675638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:16.675683Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:16.675758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:16.675816Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-07T17:40:16.675837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-07T17:40:16.675859Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:40:16.675878Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:40:16.676058Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:16.676101Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:16.676140Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:16.676185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:16.676386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:912:2784], sessionId# [0:0:0] 2026-01-07T17:40:16.676440Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 
2026-01-07T17:40:16.676493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:16.676963Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:16.676999Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:16.677026Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:40:16.677063Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 ... datashard.cpp:4020: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2026-01-07T17:40:37.663983Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-07T17:40:37.664036Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1603:3237], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:37.664124Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 } 2026-01-07T17:40:37.664182Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-07T17:40:37.664392Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1603:3237] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2026-01-07T17:40:37.664554Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 3500} 2026-01-07T17:40:37.664588Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-07T17:40:37.664781Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2026-01-07T17:40:37.665086Z node 3 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 3 change records: to# [3:1409:3117], at tablet# 72075186224037891 2026-01-07T17:40:37.665135Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2026-01-07T17:40:37.665191Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2026-01-07T17:40:37.665258Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 
2026-01-07T17:40:37.665337Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037893 2026-01-07T17:40:37.665469Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-07T17:40:37.665495Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:37.665534Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2026-01-07T17:40:37.665859Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.666309Z node 3 :TX_DATASHARD DEBUG: datashard_change_receiving.cpp:468: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2026-01-07T17:40:37.677631Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-07T17:40:37.677715Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1603:3237], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:40:37.677808Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2026-01-07T17:40:37.677848Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-07T17:40:37.677986Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1603:3237] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2026-01-07T17:40:37.678031Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1603:3237] Reply: txId# 281474976715666, status# OK, error# 2026-01-07T17:40:37.678159Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2026-01-07T17:40:37.678465Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037891 2026-01-07T17:40:37.678511Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037891 2026-01-07T17:40:37.678628Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2026-01-07T17:40:37.678656Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2026-01-07T17:40:37.678742Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2026-01-07T17:40:37.678772Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2026-01-07T17:40:37.678952Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1598:3233], serverId# [3:1599:3234], sessionId# [0:0:0] 2026-01-07T17:40:37.679015Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037891 2026-01-07T17:40:37.679039Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.679067Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-07T17:40:37.679845Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037893 2026-01-07T17:40:37.680112Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037893 2026-01-07T17:40:37.680253Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-07T17:40:37.680287Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.680327Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2026-01-07T17:40:37.680500Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.680543Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-07T17:40:37.681046Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-01-07T17:40:37.681155Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-01-07T17:40:37.714558Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037893 2026-01-07T17:40:37.714637Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715667, at: 72075186224037893 2026-01-07T17:40:37.714823Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-07T17:40:37.714863Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.714903Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 2026-01-07T17:40:37.715058Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.715126Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-07T17:40:37.715175Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-07T17:40:37.716366Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-07T17:40:37.716630Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-07T17:40:37.716778Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:37.716810Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.716846Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2026-01-07T17:40:37.720106Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.720201Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T17:40:37.720664Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-01-07T17:40:37.720749Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-01-07T17:40:37.722211Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037892 2026-01-07T17:40:37.722242Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037892 2026-01-07T17:40:37.722380Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-07T17:40:37.722406Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:37.722434Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2026-01-07T17:40:37.722515Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:37.722553Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T17:40:37.722585Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TStorageTenantTest::Empty [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> KikimrIcGateway::TestCreateResourcePool >> KikimrIcGateway::TestCreateStreamingQuery >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin >> KqpWorkloadService::TestStartQueryAfterCancel >> 
TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] Test command err: Trying to start YDB, gRPC: 1088, MsgBus: 30070 2026-01-07T17:40:32.978816Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675966014538264:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:32.978875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:33.315737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:33.315858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:33.367812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:33.375840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:33.382486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:33.509572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:33.509591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:33.509603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:33.509702Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:33.635458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:33.989476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:33.992271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:34.043375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:34.170088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:34.313236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:34.380536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.165932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675983194409278:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.166020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.166462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675983194409288:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.166530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.469755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.495899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.523319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.548344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.573851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.611235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.648692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.696186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:36.768569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675983194410155:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.768675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.768783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675983194410160:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.768858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675983194410162:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.768898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:36.772795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:36.783437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675983194410164:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:40:36.858079Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675983194410215:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:37.979271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675966014538264:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:37.979354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KikimrIcGateway::TestAlterResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::SimpleRdma [GOOD] Test command err: RandomSeed# 10645572094102810381 2026-01-07T17:40:23.232586Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:23.257993Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:23.258078Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:23.260976Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:23.276145Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:23.279513Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:40:39.637484Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:40:39.637584Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:39.687406Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-07T17:37:13.501690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675111004620574:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:13.514361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:13.613424Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:13.833060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:13.881424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:13.881514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:13.913605Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675111004620544:2081] 1767807433480482 != 1767807433480485 2026-01-07T17:37:13.928659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:13.932121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:14.078022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cb2/r3tmp/yandext0InwN.tmp 2026-01-07T17:37:14.078048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cb2/r3tmp/yandext0InwN.tmp 2026-01-07T17:37:14.079630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cb2/r3tmp/yandext0InwN.tmp 2026-01-07T17:37:14.079775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:14.113865Z INFO: TTestServer started on Port 14080 GrpcPort 11086 2026-01-07T17:37:14.118711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:11086 2026-01-07T17:37:14.340430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:14.350253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:37:14.448335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:14.518213Z node 
1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:14.650419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:37:16.853482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675123889523463:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.853618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675123889523476:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.853664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.860775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:16.861053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675123889523479:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.861122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:16.874246Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675123889523478:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:37:17.078493Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675123889523544:2641] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:17.105289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.142346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.238511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:17.242971Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675128184490848:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:17.245740Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Yzg5ZjQyMTktZDA4MzkxNmYtNTMwNDM3YjMtM2E3MjVmMzU=, ActorId: [1:7592675123889523461:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrhj0x7zxme6mpjpa2atnb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:17.248066Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675128184491132:2820] 2026-01-07T17:37:18.499596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675111004620574:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:18.499661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-07T17:37:22.906361Z :WriteToTopic_Demo_12_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:22.937613Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:22.961455Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675149659327797:2911] connected; active server actors: 1 2026-01-07T17:37:22.961688Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-07T17:37:22.963459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:22.963604Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:37:22.968290Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:22.971206Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:22.971436Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:22.971471Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [P ... artition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2026-01-07T17:40:35.583902Z :INFO: [/Root] [/Root] [d9f89f2a-3f8f743f-b4f542b5-90d3b97b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2058 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:35.584011Z :NOTICE: [/Root] [/Root] [d9f89f2a-3f8f743f-b4f542b5-90d3b97b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:40:35.584058Z :DEBUG: [/Root] [/Root] [d9f89f2a-3f8f743f-b4f542b5-90d3b97b] [] Abort session to cluster 2026-01-07T17:40:35.584960Z :DEBUG: [/Root] 0x00007DBB44522190 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_2429492155605842091_v1 Close 2026-01-07T17:40:35.585264Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:35.585292Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 grpc read failed 2026-01-07T17:40:35.585323Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 grpc closed 2026-01-07T17:40:35.585361Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 is DEAD 2026-01-07T17:40:35.585668Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_14_1_2429492155605842091_v1 2026-01-07T17:40:35.585707Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592675972497177389:2527] destroyed 2026-01-07T17:40:35.585740Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:399: Direct read cache: DestroyPartitionSession, sessionId: test-consumer_14_1_2429492155605842091_v1, proxy: [14:7592675972497177395:2529] 2026-01-07T17:40:35.585762Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_14_1_2429492155605842091_v1 2026-01-07T17:40:35.585777Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:489: session cookie 2 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 got EvDirectReadDestroyPartitionSession, assignId: 1 2026-01-07T17:40:35.585997Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [14:7592675972497177386:2524] disconnected. 
2026-01-07T17:40:35.586029Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [14:7592675972497177386:2524] disconnected; active server actors: 1 2026-01-07T17:40:35.586045Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [14:7592675972497177386:2524] client test-consumer disconnected session test-consumer_14_1_2429492155605842091_v1 2026-01-07T17:40:35.586347Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7592675972497177395:2529]: session cookie 2 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:35.586378Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7592675972497177395:2529]: session cookie 2 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1grpc read failed 2026-01-07T17:40:35.586407Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7592675972497177395:2529]: session cookie 2 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 grpc closed 2026-01-07T17:40:35.586424Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7592675972497177395:2529]: session cookie 2 consumer test-consumer session test-consumer_14_1_2429492155605842091_v1 proxy is DEAD 2026-01-07T17:40:35.587139Z :DEBUG: [/Root] 0x00007DBB44522190 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_2429492155605842091_v1 Close 2026-01-07T17:40:35.587293Z :NOTICE: [/Root] [/Root] [d9f89f2a-3f8f743f-b4f542b5-90d3b97b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:35.588487Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-01-07T17:40:35.588586Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session will now close 2026-01-07T17:40:35.588639Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-07T17:40:35.589408Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:35.589479Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:40:35.589528Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T17:40:35.589565Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2026-01-07T17:40:35.589653Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-07T17:40:35.590945Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0 grpc read done: success: 0 data: 2026-01-07T17:40:35.590976Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0 grpc read failed 2026-01-07T17:40:35.591019Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0 grpc closed 2026-01-07T17:40:35.591039Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|22527e60-bccd61c6-7f81b351-dae6d159_0 is DEAD 2026-01-07T17:40:35.591617Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:35.591781Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592675963907242732:2503] destroyed 2026-01-07T17:40:35.591830Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:35.591867Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.591887Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.591900Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:35.591916Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.591930Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:35.596029Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.596064Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.596081Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:35.596101Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.596115Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:35.611432Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.611478Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.611491Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:35.611511Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.611525Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:35.696421Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.696454Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.696467Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:35.696486Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.696499Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:35.711745Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.711777Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.711792Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-07T17:40:35.711812Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.711826Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:35.796752Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:35.796787Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.796802Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:35.796822Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:35.796835Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:29.767984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:29.861262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:29.888112Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:29.888698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:29.888788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:30.189490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:30.189620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:30.272308Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807627129724 != 1767807627129728 2026-01-07T17:40:30.284692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:30.333420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:30.447768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:30.775120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:30.788878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:30.898644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:30.948889Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2774] 2026-01-07T17:40:30.949221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:30.999625Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:30.999834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:31.001782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:31.001896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:31.001981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:31.002420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:31.002734Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:31.002880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-07T17:40:31.004406Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-07T17:40:31.004690Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:31.014202Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:910:2779] 2026-01-07T17:40:31.014442Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:31.026925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:31.027067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:31.028626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:31.028708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:31.028757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:31.029115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:31.029277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:31.029349Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:961:2776] in generation 1 2026-01-07T17:40:31.030014Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2783] 2026-01-07T17:40:31.030223Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:31.039746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:31.039917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:31.041470Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-07T17:40:31.041552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-07T17:40:31.041625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-07T17:40:31.041972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:31.042204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:31.042281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2779] in generation 1 2026-01-07T17:40:31.042807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:31.042891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:31.044400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:31.044479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:31.044527Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:31.044862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:31.045001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:31.045071Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-07T17:40:31.056307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:31.085383Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:31.085636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:31.085787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-07T17:40:31.085837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:31.085876Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:31.085914Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:31.086603Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:31.086685Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:31.086751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:31.086822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-07T17:40:31.086851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:31.086874Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:31.086899Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:31.086976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:31.087009Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-07T17:40:31.087057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:31.087121Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-07T17:40:31.087142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-07T17:40:31.087172Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-07T17:40:31.087206Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-07T17:40:31.087409Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:31.087522Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:31.087650Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:31.087682Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:31.087731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-07T17:40:41.242418Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:41.242926Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:933:2795] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] 
BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-07T17:40:41.243367Z node 3 :CHANGE_EXCHANGE 
DEBUG: change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:965:2819] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-07T17:40:41.243538Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1282: Upload rows: got OK from shard 72075186224037889 description: 2026-01-07T17:40:41.243666Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1412: completed with status SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:55.436694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:55.436785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:55.436830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:55.436883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:55.436956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:55.436992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:55.437064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:55.437131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:55.437906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:55.438190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:55.560325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:55.560406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:55.561304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:55.571075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:55.571732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:55.571881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:55.579880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:55.580115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:55.580648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:55.580876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:55.584632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:55.584768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:55.585907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:55.585990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:55.586218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:55.586276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:55.586339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:55.586485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.592646Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:55.734662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:55.734871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.735067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:55.735119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:55.735377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:55.735441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:55.738782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:55.738988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:55.739260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.739335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:55.739401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:55.739478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:55.741609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.741682Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:55.741729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:55.748151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.748212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:55.748285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:55.748348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:55.752439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:55.756505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:55.756717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:55.757760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:55.757904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:55.757951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:55.758220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:55.758272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:55.758431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:55.758533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2026-01-07T17:37:55.760669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 3, at schemeshard: 72057594046678944, txId: 253 2026-01-07T17:40:37.541401Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 180], version: 5 2026-01-07T17:40:37.541430Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 180] was 2 2026-01-07T17:40:37.541829Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-07T17:40:37.541908Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-07T17:40:37.541935Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 253 2026-01-07T17:40:37.541969Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 181], version: 2 2026-01-07T17:40:37.541999Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 181] was 2 2026-01-07T17:40:37.542703Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-07T17:40:37.542775Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-07T17:40:37.542799Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 253 2026-01-07T17:40:37.542825Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 178], version: 2 2026-01-07T17:40:37.542851Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 178] was 2 2026-01-07T17:40:37.542917Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 253, subscribers: 0 2026-01-07T17:40:37.546026Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-07T17:40:37.546171Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
253 2026-01-07T17:40:37.546280Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-07T17:40:37.548346Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-07T17:40:37.548467Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 TestModificationResult got TxId: 253, wait until txId: 253 TestWaitNotification wait txId: 253 2026-01-07T17:40:37.550745Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 253: send EvNotifyTxCompletion 2026-01-07T17:40:37.550791Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 253 2026-01-07T17:40:37.552511Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 253, at schemeshard: 72057594046678944 2026-01-07T17:40:37.552607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 253: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.552639Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 253: satisfy waiter [32:4237:6223] TestWaitNotification: OK eventTxId 253 TestWaitNotification wait txId: 245 2026-01-07T17:40:37.554399Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 245: send EvNotifyTxCompletion 2026-01-07T17:40:37.554443Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 245 TestWaitNotification wait txId: 246 2026-01-07T17:40:37.554513Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 246: send EvNotifyTxCompletion 2026-01-07T17:40:37.554540Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 246 TestWaitNotification wait txId: 247 2026-01-07T17:40:37.554587Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 247: send EvNotifyTxCompletion 2026-01-07T17:40:37.554608Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 247 TestWaitNotification wait txId: 248 2026-01-07T17:40:37.554655Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 248: send EvNotifyTxCompletion 2026-01-07T17:40:37.554677Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 248 TestWaitNotification wait txId: 249 2026-01-07T17:40:37.554724Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 249: send EvNotifyTxCompletion 2026-01-07T17:40:37.554745Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 249 TestWaitNotification wait txId: 250 2026-01-07T17:40:37.554795Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 250: send EvNotifyTxCompletion 2026-01-07T17:40:37.554825Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 
250 TestWaitNotification wait txId: 251 2026-01-07T17:40:37.554886Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 251: send EvNotifyTxCompletion 2026-01-07T17:40:37.554918Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 251 TestWaitNotification wait txId: 252 2026-01-07T17:40:37.554986Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 252: send EvNotifyTxCompletion 2026-01-07T17:40:37.555022Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 252 2026-01-07T17:40:37.557425Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 245, at schemeshard: 72057594046678944 2026-01-07T17:40:37.557741Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 246, at schemeshard: 72057594046678944 2026-01-07T17:40:37.557826Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 245: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.557870Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 245: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.558237Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 246: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.558270Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 246: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.558615Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 247, at schemeshard: 72057594046678944 2026-01-07T17:40:37.558914Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 248, at schemeshard: 72057594046678944 2026-01-07T17:40:37.558987Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 247: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.559015Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 247: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.559142Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 249, at schemeshard: 72057594046678944 2026-01-07T17:40:37.559297Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 250, at schemeshard: 72057594046678944 2026-01-07T17:40:37.559357Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 248: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.559381Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 248: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.559565Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 251, at schemeshard: 72057594046678944 2026-01-07T17:40:37.559632Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 249: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.559665Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 249: satisfy waiter [32:4240:6226] 
2026-01-07T17:40:37.559779Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 252, at schemeshard: 72057594046678944 2026-01-07T17:40:37.559820Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 250: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.559842Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 250: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.559949Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 251: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.559974Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 251: satisfy waiter [32:4240:6226] 2026-01-07T17:40:37.560101Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 252: got EvNotifyTxCompletionResult 2026-01-07T17:40:37.560127Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 252: satisfy waiter [32:4240:6226] TestWaitNotification: OK eventTxId 245 TestWaitNotification: OK eventTxId 246 TestWaitNotification: OK eventTxId 247 TestWaitNotification: OK eventTxId 248 TestWaitNotification: OK eventTxId 249 TestWaitNotification: OK eventTxId 250 TestWaitNotification: OK eventTxId 251 TestWaitNotification: OK eventTxId 252 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:29.324032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:29.428368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:29.443688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:29.444104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:29.444150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:29.692818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:29.692945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:29.752599Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807626783087 != 1767807626783091 2026-01-07T17:40:29.762485Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:29.805843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:29.908527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:30.193914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:30.207409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:30.315895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:30.370255Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2774] 2026-01-07T17:40:30.370600Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:30.425792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:30.426032Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:30.428038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:30.428162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:30.428253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:30.428711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:30.429062Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:30.429155Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-07T17:40:30.430678Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-07T17:40:30.430914Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:30.440610Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:910:2779] 2026-01-07T17:40:30.440854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:30.451928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:30.452062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:30.453624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:30.453702Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:30.453750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:30.454090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:30.454249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:30.454318Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:961:2776] in generation 1 2026-01-07T17:40:30.454987Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2783] 2026-01-07T17:40:30.455266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:30.467755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:30.467946Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:30.469673Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-07T17:40:30.469757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-07T17:40:30.469811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-07T17:40:30.470166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:30.470348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:30.470424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2779] in generation 1 2026-01-07T17:40:30.470967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:30.471060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:30.472669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T17:40:30.472752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T17:40:30.472812Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T17:40:30.473178Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:30.473324Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:30.473408Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-07T17:40:30.484894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:30.529268Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:30.529541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:30.529710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-07T17:40:30.529769Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:30.529814Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:30.529858Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:30.530625Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:30.530696Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:30.530796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:30.530883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-07T17:40:30.530915Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:30.530950Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:30.530984Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:30.531064Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:30.531092Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-07T17:40:30.531142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:30.531189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-07T17:40:30.531225Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-07T17:40:30.531248Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-07T17:40:30.531284Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-07T17:40:30.531534Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:30.531695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:30.531862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:30.531896Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:40:30.531957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... 7:40:40.426243Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.426631Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:40.426681Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:40:40.426727Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:40:40.426986Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:40:40.427132Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:40.427420Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:40.427484Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:40:40.427901Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:40:40.428279Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:40.429836Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:40:40.429881Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.430631Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:40:40.430692Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:40.431367Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:40.431716Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:40.431751Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 
2026-01-07T17:40:40.431786Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:40:40.431837Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:40.431874Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:40:40.431964Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.434007Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:40:40.434238Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:40:40.434300Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:40:40.441816Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:40.441906Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:40.441955Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:40.442410Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:40.442537Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:40.446577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:40.453441Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:40.571223Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:40.573695Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:40.607752Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:40.677699Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-07T17:40:40.678038Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:40:40.678187Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-07T17:40:40.689081Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.756698Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-07T17:40:40.756830Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=3 2026-01-07T17:40:40.767774Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.824748Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-07T17:40:40.824894Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=3 2026-01-07T17:40:40.835834Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.838812Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2026-01-07T17:40:40.850048Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-07T17:40:40.850126Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:40.851759Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:40.852078Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:40:40.852254Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:40.852296Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:40.852336Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for WaitForStreamClearance 2026-01-07T17:40:40.852508Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:40.852556Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:40.853159Z node 3 
:TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 1 2026-01-07T17:40:40.853485Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:40.853709Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715663, PendingAcks: 0 2026-01-07T17:40:40.853765Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 0 2026-01-07T17:40:40.886297Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:40:40.886351Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037888 2026-01-07T17:40:40.886485Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:40.886514Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:40:40.886542Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for ReadTableScan 2026-01-07T17:40:40.886628Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:40.886668Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:40.886693Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:40:31.234120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:31.345487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:31.364282Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:31.364776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:31.364835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:31.689894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:31.690041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:31.776261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807628613457 != 1767807628613461 2026-01-07T17:40:31.791969Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:31.839644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:31.974999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:32.262895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:32.275811Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:32.381585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:32.422389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:40:32.423378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:40:32.423643Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:40:32.423863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:32.475801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:40:32.476575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:32.476697Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:32.478408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:32.478479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:32.478521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:32.478804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:32.478918Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:32.479003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:40:32.493490Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:32.516798Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:32.517027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:32.517137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:40:32.517218Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:32.517251Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:32.517281Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:32.517488Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:32.517549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:32.517888Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:32.517982Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:32.518038Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:32.518078Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:32.518112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:40:32.518149Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:40:32.518195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:40:32.518226Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:32.518264Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:40:32.518388Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:32.518428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:32.518470Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:40:32.518857Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:40:32.518914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:40:32.519002Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:40:32.519219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:40:32.519281Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:40:32.519365Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:40:32.519428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:40:32.519473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:40:32.519521Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:40:32.519553Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:40:32.519825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:40:32.519862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:40:32.519898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:40:32.519928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:40:32.519978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:40:32.520010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:40:32.520052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:40:32.520082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:40:32.520113Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:40:32.521537Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:40:32.521595Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:40:32.532880Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... pp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is DelayComplete 2026-01-07T17:40:45.165315Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2026-01-07T17:40:45.165354Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:40:45.165388Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:40:45.165418Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is Executed 2026-01-07T17:40:45.165444Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:40:45.165469Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037889 has finished 2026-01-07T17:40:45.165508Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:45.165531Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2026-01-07T17:40:45.165558Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-07T17:40:45.165591Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2026-01-07T17:40:45.176561Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:45.176627Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:40:45.176665Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037889 on unit CompleteOperation 2026-01-07T17:40:45.176728Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1341:3112], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:40:45.176785Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:45.177135Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [3:1341:3112], Recipient [3:1168:2963]: 
NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2026-01-07T17:40:45.177173Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-07T17:40:45.177237Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 4500} 2026-01-07T17:40:45.177297Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:45.177322Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:45.177441Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1168:2963], Recipient [3:1168:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:45.177467Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:45.177511Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:45.177540Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:45.177573Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2026-01-07T17:40:45.177598Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2026-01-07T17:40:45.177632Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [4500:281474976715668] at 72075186224037890 2026-01-07T17:40:45.177665Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-07T17:40:45.177695Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-07T17:40:45.177720Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2026-01-07T17:40:45.177743Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-01-07T17:40:45.177940Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Continue 2026-01-07T17:40:45.177965Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:45.177987Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-07T17:40:45.178027Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2026-01-07T17:40:45.178051Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T17:40:45.178842Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [3:1361:3130], Recipient [3:1168:2963]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T17:40:45.178887Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-07T17:40:45.179114Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2026-01-07T17:40:45.179712Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:40:45.263670Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2026-01-07T17:40:45.263764Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2026-01-07T17:40:45.266073Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-07T17:40:45.266125Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037890 2026-01-07T17:40:45.266290Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1168:2963], Recipient [3:1168:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:45.266333Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:45.266409Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:40:45.266456Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:40:45.266496Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for ReadTableScan 2026-01-07T17:40:45.266526Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-01-07T17:40:45.266558Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [4500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2026-01-07T17:40:45.266598Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-07T17:40:45.266628Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit ReadTableScan 2026-01-07T17:40:45.266656Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2026-01-07T17:40:45.266682Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-01-07T17:40:45.266854Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is DelayComplete 2026-01-07T17:40:45.266885Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2026-01-07T17:40:45.266913Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 
72075186224037890 to execution unit CompletedOperations 2026-01-07T17:40:45.266939Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompletedOperations 2026-01-07T17:40:45.266969Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-07T17:40:45.266992Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T17:40:45.267017Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037890 has finished 2026-01-07T17:40:45.267046Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:45.267083Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-07T17:40:45.267121Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-07T17:40:45.267151Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T17:40:45.277841Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:45.277892Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:40:45.277916Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-01-07T17:40:45.277957Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1341:3112], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:40:45.277993Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:40:28.739400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:28.852378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:40:28.867678Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:40:28.868304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:40:28.868385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:40:29.153243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:29.153377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:29.240183Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807626041095 != 1767807626041099 2026-01-07T17:40:29.257009Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:29.314443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:29.414627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:40:29.706088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:29.720119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:29.826435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:29.871265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:40:29.872554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:40:29.872911Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2768] 2026-01-07T17:40:29.873258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:29.882749Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:40:29.918921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:891:2768]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:40:29.919308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:40:29.919582Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-07T17:40:29.919875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:40:29.928249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2761], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:40:29.928904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:29.929082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:29.930953Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:40:29.931033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:40:29.931118Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:40:29.931512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:29.931673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:29.931773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-07T17:40:29.932272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:40:29.932570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:40:29.936632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:40:29.936717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:40:29.936802Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:40:29.937198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:40:29.937329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:40:29.937398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-07T17:40:29.948811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:29.972919Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:40:29.973170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:29.973261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-07T17:40:29.973296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:40:29.973326Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:40:29.973367Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:40:29.973458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:40:29.973485Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:40:29.973521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:40:29.973570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-07T17:40:29.973586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:40:29.973619Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:40:29.973635Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:29.973723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2768], Recipient [1:891:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:29.973769Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:29.973845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:29.973860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:40:29.974169Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:40:29.974244Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:40:29.974276Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:40:29.974309Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-07T17:40:29.974405Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:40:29.974441Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:29.974469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:40:29.974503Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:40:29.974534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:40:29.974559Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:40:29.974597Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:40:29.974643Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:40:29.974663Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:40:29.974682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2026-01-07T17:40:29.974704Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-07T17:40:29.974784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 202 ... on plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:40:45.242817Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 72075186224037888 has finished 2026-01-07T17:40:45.253762Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-07T17:40:45.253848Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 72075186224037888 on unit DirectOp 2026-01-07T17:40:45.253892Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2026-01-07T17:40:45.459604Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:40:45.459689Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976715662 ProcessProposeKqpTransaction 2026-01-07T17:40:45.462952Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1401:3185], Recipient [3:890:2769]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:40:45.463227Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:40:45.463287Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v23000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v23000/18446744073709551615 ImmediateWriteEdgeReplied# v23000/18446744073709551615 2026-01-07T17:40:45.463337Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-07T17:40:45.463407Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2026-01-07T17:40:45.463522Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-07T17:40:45.463567Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:40:45.463610Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:40:45.463646Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on 
unit BuildAndWaitDependencies 2026-01-07T17:40:45.463695Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2026-01-07T17:40:45.463738Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-07T17:40:45.463762Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:40:45.463782Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:40:45.463806Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:40:45.463914Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:40:45.464224Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[3:1401:3185], 0} after executionsCount# 1 2026-01-07T17:40:45.464287Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[3:1401:3185], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:40:45.464380Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[3:1401:3185], 0} finished in read 2026-01-07T17:40:45.464443Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-07T17:40:45.464468Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:40:45.464495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:40:45.464522Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:40:45.464564Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-07T17:40:45.464585Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:40:45.464609Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 72075186224037888 has finished 2026-01-07T17:40:45.464649Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:40:45.464757Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:40:45.466159Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1401:3185], Recipient [3:890:2769]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:40:45.466225Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } 
}, { items { uint32_value: 3 } items { uint32_value: 4 } } 2026-01-07T17:40:45.626399Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:40:45.626484Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976715663 ProcessProposeKqpTransaction 2026-01-07T17:40:45.630397Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1429:3209], Recipient [3:1114:2930]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2026-01-07T17:40:45.630577Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:40:45.630644Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v7000/18446744073709551615 ImmediateWriteEdgeReplied# v7000/18446744073709551615 2026-01-07T17:40:45.630691Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-07T17:40:45.630756Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-07T17:40:45.630858Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:40:45.630900Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:40:45.630943Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:40:45.630974Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:40:45.631030Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-07T17:40:45.631069Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:40:45.631093Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:40:45.631115Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:40:45.631138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:40:45.631259Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:40:45.631602Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[3:1429:3209], 0} after executionsCount# 1 2026-01-07T17:40:45.631661Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[3:1429:3209], 0} sends rowCount# 2, bytes# 64, quota rows left# 
32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:40:45.631746Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[3:1429:3209], 0} finished in read 2026-01-07T17:40:45.631821Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:40:45.631846Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:40:45.631869Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:40:45.631896Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:40:45.631942Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:40:45.631962Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:40:45.631986Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-07T17:40:45.632028Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:40:45.632152Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:40:45.633625Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1429:3209], Recipient [3:1114:2930]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:40:45.633696Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets |88.1%| [TA] 
$(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> KikimrIcGateway::TestAlterResourcePool [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 11915, MsgBus: 27026 2026-01-07T17:40:30.923084Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675957728843727:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:30.923150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:31.164807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:31.170427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:31.170544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:31.231071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:31.278010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:31.289095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675957728843701:2081] 1767807630922356 != 1767807630922359 2026-01-07T17:40:31.344816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:31.344887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:31.344898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:31.345010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:31.458647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:31.943521Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:31.967734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:31.981900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-07T17:40:31.996185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 26858, MsgBus: 18831 2026-01-07T17:40:34.662348Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675973958264334:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:34.662669Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:34.688288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:34.795965Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:34.797639Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675973958264229:2081] 1767807634635651 != 1767807634635654 2026-01-07T17:40:34.798566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:34.798634Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:34.846737Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:34.940097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:34.940131Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:34.940139Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:34.940241Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:34.944183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:35.340345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:40:35.361710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 4236, MsgBus: 9340 2026-01-07T17:40:37.991278Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675988148378665:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:37.991321Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:38.000870Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:40:38.079680Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:38.079774Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:38.084094Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:38.103040Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592675988148378638:2081] 1767807637990467 != 1767807637990470 2026-01-07T17:40:38.106810Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:38.111906Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:38.197489Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:38.197523Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:38.197532Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:38.197620Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:38.314785Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:38.638362Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:38.664272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 21062, MsgBus: 1031 2026-01-07T17:40:41.474445Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676004282765408:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:41.474504Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:41.489814Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:41.562498Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:41.573033Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592676004282765381:2081] 1767807641473629 != 1767807641473632 2026-01-07T17:40:41.598747Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:41.598825Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:41.602383Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:41.647118Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:41.647146Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:41.647165Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:41.647269Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:41.666887Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:42.092715Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:42.535887Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:42.541461Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:44.649303Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676017167668271:2340], DatabaseId: 
/Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.649331Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676017167668272:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.649383Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676017167668260:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.649518Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.649871Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676017167668281:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.649908Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:44.653359Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:44.659321Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676017167668283:2571] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:40:44.665387Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676017167668279:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:40:44.665387Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676017167668280:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:40:44.758743Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676017167668330:2602] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:44.766428Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676017167668348:2610] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:45.498211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:45.900518Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:46.023746Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:46.474742Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676004282765408:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:46.474844Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> GroupWriteTest::WithRead |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> Worker::Basic >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestLoadExternalTable >> GroupWriteTest::Simple >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_monitoring/unittest >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> KikimrIcGateway::TestLoadTableMetadata |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] |88.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin >> TMonitoringTests::ValidActorId |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> ResourcePoolsDdl::TestWorkloadConfigOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless |88.1%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TMonitoringTests::ValidActorId [GOOD] |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:39:48.822289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:39:48.996899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:39:49.017378Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:39:49.017966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:39:49.018020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:39:49.300720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:49.300866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:49.380643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807585910108 != 1767807585910112 2026-01-07T17:39:49.393406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:49.443291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:49.554501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:39:49.875682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:49.889931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:50.008811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:50.045722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:50.046799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:50.047110Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:39:50.047444Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:50.094925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:50.095733Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:50.095876Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:50.097730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:39:50.097832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:39:50.097888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:39:50.098306Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:50.098461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:50.098563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:39:50.109455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:50.138296Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:39:50.138537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:50.138692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:39:50.138735Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:39:50.138779Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:39:50.138817Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:39:50.139063Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:50.139129Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:50.139490Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:39:50.139600Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:39:50.139693Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:39:50.139742Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:50.139804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:39:50.139842Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:39:50.139881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:39:50.139919Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:39:50.139964Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:39:50.140087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.140122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:50.140167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:39:50.140559Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:39:50.140610Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:50.140721Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:39:50.141025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:39:50.141092Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:39:50.141190Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:39:50.141241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:39:50.141287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:39:50.141328Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:39:50.141366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:39:50.141710Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:50.141768Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:39:50.141819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:39:50.141853Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:39:50.141903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:39:50.141934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:39:50.141975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:39:50.142009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:39:50.142037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:50.143487Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:39:50.143555Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:39:50.154393Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:40:48.244604Z node 10 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T17:40:48.244716Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:921:2784], Recipient [10:1151:2957]: {TEvReadSet step# 3001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-07T17:40:48.244748Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:48.244778Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2026-01-07T17:40:48.427238Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1203:2996], Recipient [10:1151:2957]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:48.427332Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:40:48.427401Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [10:1202:2995], serverId# [10:1203:2996], sessionId# [0:0:0] 2026-01-07T17:40:48.442001Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1208:2999], Recipient [10:1151:2957]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:40:48.442231Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:40:48.442333Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-07T17:40:48.442465Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:40:48.442518Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:40:48.442570Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:40:48.442634Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:40:48.442683Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-07T17:40:48.442732Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:40:48.442752Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:40:48.442777Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:40:48.442802Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:40:48.442936Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:40:48.443191Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-07T17:40:48.443256Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[10:1208:2999], 0} after executionsCount# 1 2026-01-07T17:40:48.443314Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[10:1208:2999], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:40:48.443405Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[10:1208:2999], 0} finished in read 2026-01-07T17:40:48.443478Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:40:48.443501Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:40:48.443526Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:40:48.443545Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:40:48.443581Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:40:48.443598Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:40:48.443625Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-07T17:40:48.443671Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:40:48.443777Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:40:48.444295Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269553219, Sender [10:1208:2999], Recipient [10:1151:2957]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:40:48.444360Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:40:48.444506Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1208:2999], Recipient [10:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-07T17:40:48.444600Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:40:48.444639Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-07T17:40:48.444682Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:40:48.444703Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:40:48.444728Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:40:48.444755Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:40:48.444795Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-07T17:40:48.444827Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:40:48.444852Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:40:48.444867Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:40:48.444889Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:40:48.444967Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-07T17:40:48.445105Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-07T17:40:48.445136Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[10:1208:2999], 1} after executionsCount# 1 2026-01-07T17:40:48.445163Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[10:1208:2999], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:40:48.445213Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read 
iterator# {[10:1208:2999], 1} finished in read 2026-01-07T17:40:48.445246Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:40:48.445273Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:40:48.445294Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:40:48.445311Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:40:48.445344Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:40:48.445369Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:40:48.445389Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-07T17:40:48.445410Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:40:48.445461Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:40:48.446590Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1208:2999], Recipient [10:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:40:48.446640Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:49.953869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:49.953957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.954021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:49.954059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:37:49.954095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:49.954120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:49.954197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.954265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:49.954960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.955191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:50.058479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:50.058539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:50.059168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:50.070702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:50.071294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:50.071437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:50.087672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:50.087956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:50.088619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.088954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:50.094265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:50.094465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:50.095466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:50.095524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:50.095664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:50.095712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:50.095754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:50.095919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.102405Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:50.250020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:50.250293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.250501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:50.250549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:50.250817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:50.250878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:50.253805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.254000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:50.254223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.254290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:50.254348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:50.254384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:50.256350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-07T17:37:50.256410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:50.256461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:50.258187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.258241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.258315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.258368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:50.262240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:50.263995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:50.264159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:50.265205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.265362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:50.265410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.265661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:50.265719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.265866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:50.265962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:50.267851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 2 2026-01-07T17:40:43.976304Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:40:43.999262Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:40:43.999330Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:40:43.999358Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-07T17:40:43.999402Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-07T17:40:44.002560Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.002682Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.002724Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:44.002767Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:40:44.002807Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:40:44.003673Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.003768Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.003806Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:44.003835Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:40:44.003864Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:40:44.004741Z node 32 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.004808Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.004834Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:44.004858Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-07T17:40:44.004885Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:40:44.006074Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.006147Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:40:44.006173Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:40:44.006198Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-07T17:40:44.006228Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-07T17:40:44.006286Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-07T17:40:44.009457Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:44.009616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:44.011880Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:40:44.012027Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:40:44.013672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 
2026-01-07T17:40:44.013723Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:40:44.015208Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:40:44.015327Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.015360Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2688:4676] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:40:44.016492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:40:44.016533Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:40:44.016621Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:40:44.016649Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:40:44.016712Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:40:44.016738Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:40:44.016799Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:40:44.016841Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-07T17:40:44.016898Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:40:44.016937Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:40:44.018765Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-07T17:40:44.018888Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.018920Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2691:4679] 2026-01-07T17:40:44.019279Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:40:44.019445Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:40:44.019501Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.019533Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter 
[32:2691:4679] 2026-01-07T17:40:44.019699Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:40:44.019770Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.019792Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2691:4679] 2026-01-07T17:40:44.019896Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:40:44.019960Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.019983Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2691:4679] 2026-01-07T17:40:44.020080Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:40:44.020120Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2691:4679] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> BasicUsage::RecreateObserver [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |88.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/statistics/service/ut/ut_aggregation/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2026-01-07T17:39:21.760090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:21.760147Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:21.760585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:21.772364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:21.772739Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:21.773049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:21.783479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:21.820261Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:21.820825Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:21.822403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:21.822488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:21.822536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:21.822800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:21.822957Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:21.823005Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:21.886296Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:21.918425Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:21.918609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:21.918709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:21.918762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:21.918803Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:21.918838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.918996Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.919055Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.919332Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:21.919442Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:21.919577Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.919615Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:21.919661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:21.919695Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:21.919738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:21.919776Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:21.919817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:21.919916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.919958Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.920018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:21.927389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:21.927467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:21.927593Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:21.927751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:21.927818Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:21.927873Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:21.927926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:21.927965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:21.928043Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:21.928077Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.928357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:21.928401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:21.928445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:21.928478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.928542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:21.928571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:21.928608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:21.928637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.928664Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:21.940622Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:21.940693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:21.940721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:21.940763Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:21.940816Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:21.941373Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.941441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:21.941509Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:21.941641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:21.941668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:21.941776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:21.941806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:21.941837Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:21.941864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:21.944308Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:21.944361Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:21.944511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.944536Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:21.944578Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:21.944608Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:21.944649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:21.944693Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:21.944726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... t [32:347:2315]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-07T17:40:49.534170Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534190Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-07T17:40:49.534241Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-07T17:40:49.534262Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534280Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-07T17:40:49.534331Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-07T17:40:49.534353Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534378Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-07T17:40:49.534428Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 
txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-07T17:40:49.534451Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534474Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-07T17:40:49.534519Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-07T17:40:49.534539Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534559Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-07T17:40:49.534615Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-07T17:40:49.534642Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534665Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-07T17:40:49.534732Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-07T17:40:49.534752Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534771Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-07T17:40:49.534824Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-07T17:40:49.534844Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534868Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-07T17:40:49.534942Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-07T17:40:49.534963Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.534983Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-07T17:40:49.535032Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet 
step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-07T17:40:49.535054Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535076Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-07T17:40:49.535116Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-07T17:40:49.535150Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535171Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-07T17:40:49.535225Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-07T17:40:49.535247Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535267Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-07T17:40:49.535325Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-07T17:40:49.535346Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535366Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-07T17:40:49.535415Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-07T17:40:49.535437Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535456Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-07T17:40:49.535507Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-07T17:40:49.535528Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535548Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-07T17:40:49.535598Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: 
{TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-07T17:40:49.535620Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535644Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-07T17:40:49.535701Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-07T17:40:49.535723Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535747Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-07T17:40:49.535806Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-07T17:40:49.535826Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535847Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-07T17:40:49.535907Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-07T17:40:49.535935Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.535955Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-07T17:40:49.536014Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:40:49.536035Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:49.536070Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 30 31 31 30 31 28 22 25 30 25 24 30 25 30 30 22 23 14 31 30 31 25 22 31 20 - - 20 - - - - actual 30 31 31 30 31 28 22 25 30 25 24 30 25 30 30 22 23 14 31 30 31 25 22 31 20 - - 20 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBePings |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> AggregateStatistics::ShouldBePings [GOOD] |88.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-07T17:40:53.249125Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.259616Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-07T17:40:53.260044Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.260287Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-07T17:40:53.260557Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.260714Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.260867Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-07T17:40:53.260897Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.261054Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-07T17:40:53.261126Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.261281Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-07T17:40:53.261343Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-07T17:40:53.261395Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.261513Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-07T17:40:53.261543Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.261633Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-07T17:40:53.261681Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.261760Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-07T17:40:53.261807Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-07T17:40:53.261836Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:40:53.261917Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = 
[4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-07T17:40:53.261935Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.262049Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-07T17:40:53.272450Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.272546Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:40:53.272700Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.272726Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:40:53.283459Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-07T17:40:53.283569Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-07T17:40:53.283609Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.283762Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.283798Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-07T17:40:53.283837Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.283861Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:40:53.283993Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.284039Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2026-01-07T17:40:53.275145Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.275631Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.384045Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-07T17:40:53.384130Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.384194Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-07T17:40:53.384950Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-07T17:40:53.385000Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.385058Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-07T17:40:53.385082Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 
2026-01-07T17:40:53.385179Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-07T17:40:53.385227Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2026-01-07T17:40:53.627790Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.635564Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2026-01-07T17:40:53.635632Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-07T17:40:53.635761Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-07T17:40:53.636523Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2026-01-07T17:40:53.636596Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 1 is not local. 2026-01-07T17:40:53.636713Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-07T17:40:53.636740Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.636789Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2026-01-07T17:40:53.636812Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-07T17:40:53.636889Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2026-01-07T17:40:53.636948Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 5 is not local. 2026-01-07T17:40:53.637010Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-07T17:40:53.637070Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2026-01-07T17:40:53.637091Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 7 is not local. 2026-01-07T17:40:53.637143Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2026-01-07T17:40:53.637165Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 8 is not local. 
2026-01-07T17:40:53.637188Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.637296Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-07T17:40:53.637316Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2026-01-07T17:39:04.002950Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1767807544002918 2026-01-07T17:39:04.290524Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675588148996787:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.290653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.342243Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.342444Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675588844846038:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:04.342943Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:04.356417Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:39:04.539339Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.556841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:04.594332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.594437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.595974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:04.596060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:04.628292Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:04.628480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.630934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:04.654066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:39:04.733473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001b17/r3tmp/yandex9Z63xH.tmp 2026-01-07T17:39:04.733496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001b17/r3tmp/yandex9Z63xH.tmp 2026-01-07T17:39:04.733655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001b17/r3tmp/yandex9Z63xH.tmp 2026-01-07T17:39:04.733751Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:04.772526Z INFO: TTestServer started on Port 1146 GrpcPort 8774 2026-01-07T17:39:04.828238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:04.829198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:8774 2026-01-07T17:39:04.967568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:05.299042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:05.353874Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:07.536127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601033899913:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.536207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601033899924:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.536257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.536956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675601033899928:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.537027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:39:07.540290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:07.558830Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675601033899927:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:39:07.861150Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675601033900019:2990] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:07.889894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:07.896190Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675601729748155:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.896808Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YWY0MTExZDYtOGYwZGQwOTQtODg0ZDg2N2ItYTM5ZTdkOGI=, ActorId: [2:7592675601729748134:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmy4t53fvrg0y7wdn6kep, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:07.896813Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675601033900032:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:07.897408Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTczZDI2NTUtNDJjOWRiMWEtNjMyYjEzNDMtZmM5MTg0Nzg=, ActorId: [1:7592675601033899911:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmy4eaktej942m9panrza, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:07.900306Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:07.900471Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:08.0269 ... 02-3fdd745c-56ea70c6] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:50.776066Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2026-01-07T17:40:50.776099Z :INFO: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 60 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:50.776140Z :NOTICE: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:40:50.776159Z :DEBUG: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] [] Abort session to cluster 2026-01-07T17:40:50.776352Z :INFO: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:50.776396Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2026-01-07T17:40:50.776433Z :INFO: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 61 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:50.776545Z :NOTICE: [/Root] [/Root] [b75730a-cde61e02-3fdd745c-56ea70c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:50.776664Z :INFO: [/Root] [/Root] [4da30c3c-ed68e017-2a12b5cb-81543005] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:50.776688Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:50.776714Z :INFO: [/Root] [/Root] [4da30c3c-ed68e017-2a12b5cb-81543005] Counters: { Errors: 0 CurrentSessionLifetimeMs: 63 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:50.776753Z :NOTICE: [/Root] [/Root] [4da30c3c-ed68e017-2a12b5cb-81543005] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:50.776794Z :INFO: [/Root] [/Root] [91145264-1ead29c3-70ca1732-f8c35fd2] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:50.776818Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-07T17:40:50.776857Z :INFO: [/Root] [/Root] [91145264-1ead29c3-70ca1732-f8c35fd2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 70 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:50.776894Z :NOTICE: [/Root] [/Root] [91145264-1ead29c3-70ca1732-f8c35fd2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:50.782045Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.782081Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.782105Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.782125Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.782137Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.786134Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_5854737441791741273_v1 2026-01-07T17:40:50.786171Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592676043436951462:2488] destroyed 2026-01-07T17:40:50.792836Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_3_5854737441791741273_v1 2026-01-07T17:40:50.781034Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_6620544938386439468_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:50.781113Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_6620544938386439468_v1 grpc read failed 2026-01-07T17:40:50.781240Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer shared/user session shared/user_3_1_6620544938386439468_v1 closed 2026-01-07T17:40:50.781821Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_6620544938386439468_v1 is DEAD 2026-01-07T17:40:50.782491Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_8118037881334316732_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:50.782503Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_8118037881334316732_v1 grpc read failed 2026-01-07T17:40:50.782525Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer shared/user session shared/user_3_2_8118037881334316732_v1 grpc closed 2026-01-07T17:40:50.782554Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_8118037881334316732_v1 is DEAD 2026-01-07T17:40:50.784601Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_5854737441791741273_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:50.784613Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_5854737441791741273_v1 grpc read failed 2026-01-07T17:40:50.784629Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_5854737441791741273_v1 grpc closed 2026-01-07T17:40:50.784674Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_5854737441791741273_v1 is DEAD 
2026-01-07T17:40:50.785302Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951451:2478] disconnected. 2026-01-07T17:40:50.785327Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951451:2478] disconnected; active server actors: 1 2026-01-07T17:40:50.785362Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951451:2478] client user disconnected session shared/user_3_1_6620544938386439468_v1 2026-01-07T17:40:50.785407Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-07T17:40:50.785452Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnreadableFamilies=0 [], RequireBalancing=0 [] 2026-01-07T17:40:50.785479Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2026-01-07T17:40:50.785502Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000032s 2026-01-07T17:40:50.785894Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951455:2480] disconnected. 2026-01-07T17:40:50.785913Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951455:2480] disconnected; active server actors: 1 2026-01-07T17:40:50.785929Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951455:2480] client user disconnected session shared/user_3_2_8118037881334316732_v1 2026-01-07T17:40:50.785949Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-07T17:40:50.785976Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951456:2481] disconnected. 
2026-01-07T17:40:50.785989Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951456:2481] disconnected; active server actors: 1 2026-01-07T17:40:50.786002Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7592676043436951456:2481] client user disconnected session shared/user_3_3_5854737441791741273_v1 2026-01-07T17:40:50.885108Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.885140Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.885154Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.885175Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.885187Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.985512Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.985544Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.985557Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.985577Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.985588Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:51.089035Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:51.089072Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.089087Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:51.089107Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.089116Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2026-01-07T17:40:53.914229Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.921554Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2026-01-07T17:40:53.921926Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 
3 } 2026-01-07T17:40:53.922040Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-07T17:40:53.922192Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2026-01-07T17:40:53.922237Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922281Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-07T17:40:53.922296Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.922337Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2026-01-07T17:40:53.922375Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922418Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2026-01-07T17:40:53.922449Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922486Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2026-01-07T17:40:53.922508Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922530Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-07T17:40:53.922582Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2026-01-07T17:40:53.922631Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922700Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-07T17:40:53.922748Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-07T17:40:53.922763Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.922783Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2026-01-07T17:40:53.922808Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.922832Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-07T17:40:53.922841Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.922859Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 7 2026-01-07T17:40:53.922886Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], 
server id = [0:0:0], tablet id = 7, status = ERROR 2026-01-07T17:40:53.922898Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.935401Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 1 has already been processed 2026-01-07T17:40:53.935516Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 2 2026-01-07T17:40:53.935557Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-07T17:40:53.935678Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 3 has already been processed 2026-01-07T17:40:53.935704Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 4 2026-01-07T17:40:53.935728Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-07T17:40:53.935776Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 5 has already been processed 2026-01-07T17:40:53.935798Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 6 2026-01-07T17:40:53.935814Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 6 is not local. 2026-01-07T17:40:53.935857Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.935970Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:53.936003Z node 1 :STATISTICS DEBUG: service_impl.cpp:1027: Skip TEvStatisticsRequestTimeout 2026-01-07T17:40:53.936065Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-07T17:40:53.936091Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.936160Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-07T17:40:53.936184Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.936228Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-07T17:40:53.936250Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2026-01-07T17:40:53.926835Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.935656Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-07T17:40:53.936043Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.936477Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2026-01-07T17:40:53.936545Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 
2026-01-07T17:40:53.936865Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.937029Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:53.937201Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-07T17:40:53.937405Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2026-01-07T17:40:53.937468Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.937522Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2026-01-07T17:40:53.937557Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.937649Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2026-01-07T17:40:53.937689Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.937779Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-07T17:40:53.937929Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-07T17:40:53.938096Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-07T17:40:53.938152Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-07T17:40:53.938174Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.938225Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-07T17:40:53.938277Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.938412Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2026-01-07T17:40:53.938469Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:53.938592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-07T17:40:53.938618Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.938659Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-07T17:40:53.938680Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.938724Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-07T17:40:53.938777Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 
2026-01-07T17:40:53.938865Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-07T17:40:53.938888Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.938937Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-07T17:40:53.939024Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-07T17:40:53.939052Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.939184Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-07T17:40:53.939216Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:53.939261Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-07T17:40:53.939408Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-07T17:40:53.939508Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:53.939668Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-07T17:40:53.939724Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-07T17:40:54.795730Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:54.803858Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-07T17:40:54.804297Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:54.804482Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-07T17:40:54.804727Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:54.804878Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-07T17:40:54.805038Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 
1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-07T17:40:54.805063Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:54.805149Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-07T17:40:54.805212Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:54.805342Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-07T17:40:54.805390Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-07T17:40:54.805431Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:54.805531Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-07T17:40:54.805558Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:54.805640Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-07T17:40:54.805679Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-07T17:40:54.805746Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-07T17:40:54.805785Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-07T17:40:54.805809Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:40:54.805883Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-07T17:40:54.805906Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-07T17:40:54.806021Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-07T17:40:54.816712Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:54.816776Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:40:54.816855Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:54.816870Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:40:54.829184Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-07T17:40:54.829258Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-07T17:40:54.829291Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-07T17:40:54.829427Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:54.829464Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-07T17:40:54.829498Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:54.829520Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 
2026-01-07T17:40:54.829635Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-07T17:40:54.829661Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-07T17:37:19.730056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675139366464591:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:19.730675Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:19.797253Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:20.067652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:20.126147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:20.126255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:20.202571Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675139366464480:2081] 1767807439684467 != 1767807439684470 2026-01-07T17:37:20.213271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:20.221711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:20.312705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:20.514912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003c94/r3tmp/yandexkQOVAw.tmp 2026-01-07T17:37:20.514933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003c94/r3tmp/yandexkQOVAw.tmp 2026-01-07T17:37:20.515084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003c94/r3tmp/yandexkQOVAw.tmp 2026-01-07T17:37:20.515183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:20.547992Z INFO: TTestServer started on Port 4262 GrpcPort 22541 PQClient connected to localhost:22541 2026-01-07T17:37:20.739142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:37:20.746053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:20.812669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:20.928515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:37:23.272884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675156546334701:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.273014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675156546334716:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.273073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.275728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675156546334719:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.275847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.278732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:23.289850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675156546334755:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.289961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.294247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675156546334764:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.294370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:23.299610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675156546334718:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:23.535577Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675156546334778:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:23.575193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.682298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.850246Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675156546334789:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:23.853465Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2E1YTJlNmQtNjc5MjZlNDItNjBkMTY4YmQtYzc4ZTc0YTg=, ActorId: [1:7592675156546334699:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrhr9d58nvmr2v08yty5by, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:23.855934Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:23.898038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675160841302374:2826] 2026-01-07T17:37:24.734322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675139366464591:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:24.737050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-07T17:37:29.337134Z :WriteToTopic_Demo_19_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:29.373084Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:29.395737Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675182316139049:2921] connected; active server actors: 1 2026-01-07T17:37:29.395987Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing ... 
kie 2 consumer test-consumer session test-consumer_13_1_10513536928584867465_v1 proxy is DEAD 2026-01-07T17:40:50.694881Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7592676034607685167:2525] disconnected. 2026-01-07T17:40:50.694909Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7592676034607685167:2525] disconnected; active server actors: 1 2026-01-07T17:40:50.694943Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7592676034607685167:2525] client test-consumer disconnected session test-consumer_13_1_10513536928584867465_v1 2026-01-07T17:40:50.695063Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_10513536928584867465_v1 2026-01-07T17:40:50.695096Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7592676034607685170:2528] destroyed 2026-01-07T17:40:50.695160Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_10513536928584867465_v1 2026-01-07T17:40:50.711901Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:50.712013Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:40:50.712073Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T17:40:50.712107Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2026-01-07T17:40:50.712188Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-07T17:40:50.717087Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0 grpc read done: success: 0 data: 2026-01-07T17:40:50.717134Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0 grpc read failed 2026-01-07T17:40:50.717168Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0 grpc closed 2026-01-07T17:40:50.717186Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|58c01989-67a79d3f-4a24ba04-5bbe124_0 is DEAD 2026-01-07T17:40:50.718070Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:50.721084Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7592676026017750508:2502] destroyed 2026-01-07T17:40:50.721147Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:50.721175Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.721192Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.721221Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.721240Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.721254Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.761330Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.761385Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.761402Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.761436Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.761453Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.761511Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.761525Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.761539Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.761553Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.761565Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.861637Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.861690Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.861707Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.861731Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.861746Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.861811Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.861827Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.861840Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-07T17:40:50.861853Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.861863Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.961993Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.962034Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.962060Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.962084Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.962104Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.962166Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.962180Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.962190Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.962204Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.962220Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:51.065096Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:51.065143Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.065157Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:51.065175Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.065188Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:51.065257Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:51.065269Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.065280Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:51.065292Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.065302Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:51.166321Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:51.166361Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.166378Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:51.166401Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.166416Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:51.166470Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:51.166483Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.166495Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:51.166507Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:51.166520Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier |88.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets |88.1%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestDropStreamingQuery >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 11425, MsgBus: 8101 2026-01-07T17:40:34.698347Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675977358251118:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:34.698410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:34.922304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:34.926175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:34.926296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:34.937731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:35.041587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:35.092905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:35.092961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:35.092999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:35.093101Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:35.095356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:35.474875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:35.495936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-07T17:40:35.517483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 22077, MsgBus: 61525 2026-01-07T17:40:38.047945Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675993505499815:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:38.048003Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:38.065815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:38.130401Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:38.132055Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592675993505499788:2081] 1767807638047068 != 1767807638047071 2026-01-07T17:40:38.140865Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:38.140986Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:38.170674Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:38.300960Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:38.306853Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:38.306879Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:38.306886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:38.306975Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:38.658370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:38.680599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-07T17:40:38.697257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-07T17:40:38.756048Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675993505500684:2532] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:38.756192Z node 2 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 64876, MsgBus: 17246 2026-01-07T17:40:41.388099Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592676004536657426:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:41.388380Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:41.407050Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:41.490349Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592676004536657319:2081] 1767807641385332 != 1767807641385335 2026-01-07T17:40:41.505356Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:41.517551Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:41.517634Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:41.524715Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:41.556487Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:41.556514Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:41.556527Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:41.556612Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:41.674850Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:41.955845Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:41.968606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 15358, MsgBus: 26477 2026-01-07T17:40:44.659472Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676020056691738:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:44.659513Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:44.674565Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:44.761047Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:44.762631Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592676020056691709:2081] 1767807644658936 != 1767807644658939 2026-01-07T17:40:44.799307Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:44.799390Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:44.803330Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:44.852613Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:44.852636Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:44.852645Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:44.852735Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:44.975776Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:45.274165Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:45.312443Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:45.337124Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) Trying to start YDB, gRPC: 27794, MsgBus: 6910 2026-01-07T17:40:48.539410Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592676033981586158:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:48.539487Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:48.554878Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:48.639046Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:48.640568Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592676033981586103:2081] 1767807648536970 != 1767807648536973 2026-01-07T17:40:48.689070Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:48.689166Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:48.691412Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:48.737046Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:48.761709Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:48.761737Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:48.761748Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:48.761858Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:49.268944Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:49.572029Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:49.578214Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:52.290411Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676051161456288:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.290430Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676051161456289:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.290486Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676051161456276:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.290650Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.291110Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676051161456298:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.291186Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.294228Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:52.297008Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592676051161456300:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:40:52.304985Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592676051161456297:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:40:52.304985Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592676051161456296:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:40:52.394713Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592676051161456347:2601] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:52.402379Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592676051161456365:2609] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:53.166574Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:53.539762Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592676033981586158:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:53.539873Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:53.561249Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.681502Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2026-01-07T17:39:32.134473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:32.134529Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:32.135011Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:32.148838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:32.149215Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:32.149468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:32.159269Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received 
event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:32.202128Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:32.202782Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:32.204386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:32.204454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:32.204501Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:32.204884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:32.205114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:32.205178Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:32.272247Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:32.298384Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:32.298541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:32.298612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:32.298671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:32.298711Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:32.298743Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:32.298863Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:32.298910Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:32.299129Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:32.299206Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:32.299287Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:32.299313Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:32.299348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:32.299371Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:32.299412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:32.299434Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:32.299482Z node 1 :TX_DATASHARD 
DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:32.299532Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:32.299555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:32.299587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:32.304676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:32.304728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:32.304796Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:32.304964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:32.305042Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:32.305093Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:32.305148Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:32.305184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:32.305214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:32.305244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:32.305517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:32.305558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:32.305598Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:32.305638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:32.305704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:32.305732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:32.305767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:32.305795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: 
Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:32.305819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:32.317855Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:32.317916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:32.317959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:32.318014Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:32.318065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:32.318497Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:32.318534Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:32.318564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:32.318705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:32.318742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:32.318876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:32.318909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:32.318933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:32.318953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:32.321808Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:32.321856Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:32.322013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:32.322039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:32.322076Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:32.322102Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 
2026-01-07T17:39:32.322127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:32.322159Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:32.322183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... [32:347:2315]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-07T17:40:54.302039Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302070Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-07T17:40:54.302149Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-07T17:40:54.302178Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302207Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-07T17:40:54.302288Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-07T17:40:54.302316Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302347Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-07T17:40:54.302432Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-07T17:40:54.302464Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302494Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-07T17:40:54.302576Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-07T17:40:54.302610Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302640Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-07T17:40:54.302725Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-07T17:40:54.302754Z node 32 :TX_DATASHARD TRACE: 
datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302781Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-07T17:40:54.302861Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-07T17:40:54.302891Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.302946Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-07T17:40:54.303050Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-07T17:40:54.303082Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303110Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-07T17:40:54.303188Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-07T17:40:54.303219Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303247Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-07T17:40:54.303330Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-07T17:40:54.303366Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303394Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-07T17:40:54.303514Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-07T17:40:54.303554Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303583Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-07T17:40:54.303689Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-07T17:40:54.303722Z node 32 
:TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303750Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-07T17:40:54.303829Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-07T17:40:54.303861Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.303889Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-07T17:40:54.303963Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-07T17:40:54.303992Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304032Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-07T17:40:54.304113Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-07T17:40:54.304141Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304167Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-07T17:40:54.304269Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-07T17:40:54.304300Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304338Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-07T17:40:54.304423Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-07T17:40:54.304518Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304553Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-07T17:40:54.304645Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 
2026-01-07T17:40:54.304674Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304700Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-07T17:40:54.304789Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-07T17:40:54.304822Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.304847Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-07T17:40:54.304954Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:40:54.304983Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:40:54.305011Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 26 29 31 28 26 30 21 29 27 24 30 21 21 15 22 30 28 9 28 28 20 28 24 15 15 10 10 - - 15 - - actual 26 29 31 28 26 30 21 29 27 24 30 21 21 15 22 30 28 9 28 28 20 28 24 15 15 10 10 - - 15 - - interm 6 5 1 6 3 4 6 - 6 0 2 6 - - 5 5 6 1 6 - - 4 1 - - 1 - - - - - - >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> Worker::Basic [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_41_Query >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> KqpWorkloadServiceDistributed::TestDistributedQueue >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system [GOOD] >> GroupWriteTest::ByTableName [GOOD] >> GroupWriteTest::WithRead [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2026-01-07T17:40:49.545292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676038386736812:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:49.545629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:49.773043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:49.778235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:49.778352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:49.831065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:49.928220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:50.049315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:50.173511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:50.173535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:50.173542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:50.173637Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:50.451360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:50.458764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:40:50.553474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:50.589608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:50.701219Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handshake: worker# [1:7592676042681705008:2609] 2026-01-07T17:40:50.701325Z 
node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handshake: worker# [1:7592676042681705008:2609] 2026-01-07T17:40:50.707931Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:40:50.733205Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:40:50.733285Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Send handshake: worker# [1:7592676042681705008:2609] 2026-01-07T17:40:50.733410Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-07T17:40:50.733430Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:173: [Worker][1:7592676042681705008:2609] Handshake with writer: sender# [1:7592676042681705010:2609] 2026-01-07T17:40:50.756888Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Create read session: session# [1:7592676042681705016:2487] 2026-01-07T17:40:50.756996Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-07T17:40:50.757013Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7592676042681705008:2609] Handshake with reader: sender# [1:7592676042681705009:2609] 2026-01-07T17:40:50.757059Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:50.789033Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_5685877736650207214_v1 } } 
2026-01-07T17:40:50.826594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:52.693945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676051271639788:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.693947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676051271639776:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.694054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676051271639787:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.694113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.694811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676051271639797:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.694884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.697959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:52.733416Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676051271639798:2685] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:40:52.737442Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676051271639795:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:40:52.737442Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676051271639796:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:40:52.793764Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676051271639846:2717] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:52.822698Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676051271639864:2725] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:53.515832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:53.905320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB firs ... pp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:55.821151Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-07T17:40:55.783000Z WriteTime: 2026-01-07T17:40:55.785000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-07T17:40:55.821228Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-07T17:40:55.783000Z WriteTime: 2026-01-07T17:40:55.785000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:55.821301Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-07T17:40:55.783000Z WriteTime: 2026-01-07T17:40:55.785000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:55.821437Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2026-01-07T17:40:55.821651Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 
2026-01-07T17:40:55.821724Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-07T17:40:55.821814Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-07T17:40:55.823565Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:40:55.823657Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-07T17:40:55.823714Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2026-01-07T17:40:55.823791Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:55.823847Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:55.962555Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-07T17:40:55.951000Z WriteTime: 2026-01-07T17:40:55.953000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-07T17:40:55.962638Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-07T17:40:55.951000Z WriteTime: 2026-01-07T17:40:55.953000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:55.962693Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-07T17:40:55.951000Z WriteTime: 2026-01-07T17:40:55.953000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:55.962795Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2026-01-07T17:40:55.962904Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-07T17:40:55.965018Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:40:55.965102Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-07T17:40:55.965154Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-07T17:40:55.965213Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:55.965264Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:56.152949Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-07T17:40:56.117000Z WriteTime: 2026-01-07T17:40:56.119000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-07T17:40:56.153038Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-07T17:40:56.117000Z WriteTime: 2026-01-07T17:40:56.119000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:56.153101Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-07T17:40:56.117000Z WriteTime: 2026-01-07T17:40:56.119000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-07T17:40:56.153216Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2026-01-07T17:40:56.153309Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-07T17:40:56.154742Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7592676064156542410:2609] Handle 
NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T17:40:56.154811Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-07T17:40:56.154848Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7592676042681705010:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2026-01-07T17:40:56.154893Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:56.154950Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:40:56.244223Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2026-01-07T17:40:56.244300Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7592676042681705009:2609] Leave 2026-01-07T17:40:56.244445Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:267: [Worker][1:7592676042681705008:2609] Reader has gone: sender# [1:7592676042681705009:2609]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2026-01-07T17:40:56.244533Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7592676068451509869:2609] Handshake: worker# [1:7592676042681705008:2609] 2026-01-07T17:40:56.247873Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7592676068451509869:2609] Create read session: session# [1:7592676068451509870:2487] 2026-01-07T17:40:56.247922Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7592676042681705008:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-07T17:40:56.247931Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7592676042681705008:2609] Handshake with reader: sender# [1:7592676068451509869:2609] 2026-01-07T17:40:56.247951Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592676068451509869:2609] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 15891275738600260382 2026-01-07T17:40:50.375075Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:50.392170Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:50.392219Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:50.394281Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:50.407968Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:50.411054Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:40:57.802629Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:40:57.802707Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:57.872704Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |88.2%| [LD] {RESULT} 
$(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 11060896558096002602 2026-01-07T17:40:26.160041Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:26.183969Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:26.184044Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:26.186632Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:26.200630Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:26.203165Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:40:58.059273Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:40:58.059399Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:58.120956Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |88.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-07T17:37:15.915437Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675123232042492:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:15.915521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:15.986282Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:16.229063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:16.229245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:16.291550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:16.295845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:16.351542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:16.358090Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675123232042466:2081] 1767807435914379 != 1767807435914382 2026-01-07T17:37:16.450345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cb1/r3tmp/yandex5vVe1j.tmp 2026-01-07T17:37:16.450373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cb1/r3tmp/yandex5vVe1j.tmp 2026-01-07T17:37:16.450552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cb1/r3tmp/yandex5vVe1j.tmp 2026-01-07T17:37:16.450643Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:16.489316Z INFO: TTestServer started on Port 24422 GrpcPort 5497 2026-01-07T17:37:16.498921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:5497 2026-01-07T17:37:16.764243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:16.790787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:37:16.872985Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:16.930773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:17.028788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-07T17:37:19.377224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675140411912678:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:19.377396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:19.379865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675140411912698:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:19.379915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675140411912699:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:19.382181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:19.386979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:19.407555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675140411912703:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:37:19.471438Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675140411912767:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:19.933357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:19.950707Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675140411912775:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:19.963838Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzlhYWQxNDEtMzBjMzJiYTItODdmNGM0NmMtYTI0NzFmMjk=, ActorId: [1:7592675140411912675:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrhmgdekjcez1kfdtt5xcg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:19.966281Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:20.007011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:20.112470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675144706880348:2820] 2026-01-07T17:37:20.917058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675123232042492:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:20.917147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-01-07T17:37:25.600780Z :WriteToTopic_Demo_21_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:25.624351Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:25.650121Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675166181717020:2915] connected; active server actors: 1 2026-01-07T17:37:25.650359Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T17:37:25.660770Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:25.669385Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:37:25.670586Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:25.672269Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:25.672521Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:25.672559Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:92 ... 6224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.583761Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:55.583776Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.583784Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:55.602921Z :INFO: [/Root] [/Root] [35b8358e-efdbf02f-fccfe2d7-7f854f03] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:55.603021Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:1:2 2026-01-07T17:40:55.603076Z :INFO: [/Root] [/Root] [35b8358e-efdbf02f-fccfe2d7-7f854f03] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2005 BytesRead: 14000000 MessagesRead: 2 BytesReadCompressed: 14000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:55.603193Z :NOTICE: [/Root] [/Root] [35b8358e-efdbf02f-fccfe2d7-7f854f03] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:40:55.603253Z :DEBUG: [/Root] [/Root] [35b8358e-efdbf02f-fccfe2d7-7f854f03] [] Abort session to cluster 2026-01-07T17:40:55.604161Z :DEBUG: [/Root] 0x00007D812567D190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_13744194102045595401_v1 Close 2026-01-07T17:40:55.604580Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:55.604627Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 grpc read failed 2026-01-07T17:40:55.604661Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 grpc closed 2026-01-07T17:40:55.604712Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 is DEAD 2026-01-07T17:40:55.604958Z :DEBUG: [/Root] 0x00007D812567D190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_13744194102045595401_v1 Close 2026-01-07T17:40:55.604954Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_13744194102045595401_v1 2026-01-07T17:40:55.605002Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7592676055944357769:2522] destroyed 2026-01-07T17:40:55.605101Z :NOTICE: [/Root] [/Root] [35b8358e-efdbf02f-fccfe2d7-7f854f03] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:55.605070Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:399: Direct read cache: DestroyPartitionSession, sessionId: test-consumer_13_1_13744194102045595401_v1, proxy: [13:7592676055944357775:2524] 2026-01-07T17:40:55.605109Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_13744194102045595401_v1 2026-01-07T17:40:55.605144Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:489: session cookie 2 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 got EvDirectReadDestroyPartitionSession, assignId: 1 2026-01-07T17:40:55.605255Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7592676055944357775:2524]: session cookie 2 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:55.605284Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7592676055944357775:2524]: session cookie 2 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1grpc read failed 2026-01-07T17:40:55.605313Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7592676055944357775:2524]: session cookie 2 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 grpc closed 2026-01-07T17:40:55.605333Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7592676055944357775:2524]: session cookie 2 consumer test-consumer session test-consumer_13_1_13744194102045595401_v1 proxy is DEAD 2026-01-07T17:40:55.605752Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7592676055944357766:2519] disconnected. 2026-01-07T17:40:55.605790Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7592676055944357766:2519] disconnected; active server actors: 1 2026-01-07T17:40:55.605810Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7592676055944357766:2519] client test-consumer disconnected session test-consumer_13_1_13744194102045595401_v1 2026-01-07T17:40:55.606251Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session: close. 
Timeout 0.000000s 2026-01-07T17:40:55.606302Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session will now close 2026-01-07T17:40:55.606352Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-07T17:40:55.607023Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:55.607078Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:40:55.607134Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T17:40:55.607166Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2026-01-07T17:40:55.607241Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-07T17:40:55.607334Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0 grpc read done: success: 0 data: 2026-01-07T17:40:55.607361Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0 grpc read failed 2026-01-07T17:40:55.607960Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0 2026-01-07T17:40:55.608013Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|b138dad5-6dae43a8-697c2e64-3fda3e85_0 is DEAD 2026-01-07T17:40:55.608589Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:55.608937Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7592676047354423111:2498] destroyed 2026-01-07T17:40:55.608987Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:55.609014Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:55.609034Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.609050Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:55.609077Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.609093Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:55.674768Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:55.674809Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.674824Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:55.674857Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.674871Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:55.684015Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:55.684065Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.684093Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:55.684116Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.684127Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:55.775145Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:55.775191Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.775207Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:55.775230Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.775244Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:55.784346Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:55.784394Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.784411Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-07T17:40:55.784442Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:55.784450Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |88.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:57.179641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:57.179727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:57.179790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:57.179840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:57.179884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:57.179915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:57.179981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:57.180049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:57.180920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:57.181248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:57.317806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:57.317888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:57.318789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:57.332208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:57.333177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:57.333383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:57.349407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:57.349683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:57.350340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:57.350633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:57.359980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:57.360179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:57.361363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:57.361431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:57.361593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:57.361650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:57.361700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:57.361876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.371293Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 
2026-01-07T17:37:57.508810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:57.510112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.510335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:57.510386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:57.510570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:57.510618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:57.513168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:57.513407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:57.513648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.513731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:57.513770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:57.513807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:57.519140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.519206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:57.519248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:57.521026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.521078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 
1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:57.521162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:57.521229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:57.524976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:57.526747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:57.526894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:57.527661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:57.527773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:57.527809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:57.528007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:57.528041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:57.528152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:57.528268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:57.529775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 4 2026-01-07T17:40:53.702531Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-07T17:40:53.702564Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-07T17:40:53.702595Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-07T17:40:53.702622Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-07T17:40:53.702650Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-07T17:40:53.705561Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.705696Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.705737Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:53.705775Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-07T17:40:53.705817Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-07T17:40:53.707072Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.707178Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.707210Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:53.707243Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-07T17:40:53.707281Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-07T17:40:53.708350Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-01-07T17:40:53.708435Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.708469Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:53.708500Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-07T17:40:53.708532Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-07T17:40:53.710081Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.710151Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:53.710178Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:53.710202Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-07T17:40:53.710229Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-01-07T17:40:53.710284Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-07T17:40:53.713162Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:53.713305Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:53.715277Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:53.715395Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-07T17:40:53.716617Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-01-07T17:40:53.716655Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-07T17:40:53.718412Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-07T17:40:53.718543Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.718590Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:5322:6873] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-07T17:40:53.720097Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-07T17:40:53.720148Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-07T17:40:53.720241Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-07T17:40:53.720270Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-07T17:40:53.720337Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-07T17:40:53.720364Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-07T17:40:53.720425Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-07T17:40:53.720450Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-07T17:40:53.720509Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-07T17:40:53.720535Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-07T17:40:53.722171Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-07T17:40:53.722334Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.722370Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:5325:6876] 2026-01-07T17:40:53.722762Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-07T17:40:53.722994Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.723027Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:5325:6876] 2026-01-07T17:40:53.723110Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-07T17:40:53.723293Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 
72057594046678944 2026-01-07T17:40:53.723361Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.723389Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:5325:6876] 2026-01-07T17:40:53.723548Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-07T17:40:53.723607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.723633Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:5325:6876] 2026-01-07T17:40:53.723812Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-07T17:40:53.723845Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:5325:6876] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] Test command err: 2026-01-07T17:37:09.006213Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675091326959206:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:09.006398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:09.049607Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:09.233106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:09.258009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:09.258142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:09.266309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:09.386426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:09.501445Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:09.515743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cb5/r3tmp/yandexuWtyHO.tmp 2026-01-07T17:37:09.515764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cb5/r3tmp/yandexuWtyHO.tmp 2026-01-07T17:37:09.515928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cb5/r3tmp/yandexuWtyHO.tmp 2026-01-07T17:37:09.516002Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:09.549038Z INFO: TTestServer started on Port 24238 GrpcPort 13372 PQClient connected to localhost:13372 2026-01-07T17:37:09.795209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:09.805656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:37:09.815844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:37:09.882969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:10.021073Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:13.471563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675112801796561:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.471772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.472028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675112801796574:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.472128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675112801796575:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.472463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.477240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:13.492019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675112801796578:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:13.567665Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675112801796642:2646] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:13.919837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:13.921544Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675112801796650:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:13.922954Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MmZjNDlhNjAtNGEyOGUyNWItZDQ0YjQyNzgtMTU5MjVkMzM=, ActorId: [1:7592675112801796559:2331], ActorState: ExecuteState, LegacyTraceId: 01kecrheqx92tvng150drjbr61, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:13.929142Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:13.986367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:14.008869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675091326959206:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:14.008995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:14.104446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675117096764223:2825] === CheckClustersList. 
Ok 2026-01-07T17:37:19.510191Z :WriteToTopic_Demo_41_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:19.535473Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:19.558477Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675138571600893:2916] connected; active server actors: 1 2026-01-07T17:37:19.558725Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T17:37:19.565726Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:19.583042Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:37:19.584325Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:19.585950Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:19.586263Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:19.586306Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:19.586351Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx w ... e 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:49.984594Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:49.984611Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:49.984630Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:49.984646Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.019386Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.019423Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.019439Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.019460Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.019475Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.065004Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:474: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 checking auth because of timeout 2026-01-07T17:40:50.065076Z node 13 :PQ_READ_PROXY DEBUG: 
read_init_auth_actor.cpp:41: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 auth for : test-consumer 2026-01-07T17:40:50.065865Z node 13 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 Handle describe topics response 2026-01-07T17:40:50.065984Z node 13 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 auth is DEAD 2026-01-07T17:40:50.066063Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:313: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 auth ok: topics# 1, initDone# 1 2026-01-07T17:40:50.076347Z :INFO: [/Root] [/Root] [e198d876-2ef48481-f8343b91-80bf9960] Closing read session. Close timeout: 0.000000s 2026-01-07T17:40:50.076424Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:319:320 2026-01-07T17:40:50.076480Z :INFO: [/Root] [/Root] [e198d876-2ef48481-f8343b91-80bf9960] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2032 BytesRead: 20480000 MessagesRead: 320 BytesReadCompressed: 20480000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:40:50.076593Z :NOTICE: [/Root] [/Root] [e198d876-2ef48481-f8343b91-80bf9960] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:40:50.076649Z :DEBUG: [/Root] [/Root] [e198d876-2ef48481-f8343b91-80bf9960] [] Abort session to cluster 2026-01-07T17:40:50.077249Z :DEBUG: [/Root] 0x00007D776C91C590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_15496432466742017312_v1 Close 2026-01-07T17:40:50.077400Z :DEBUG: [/Root] 0x00007D776C91C590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_15496432466742017312_v1 Close 2026-01-07T17:40:50.077524Z :NOTICE: [/Root] [/Root] [e198d876-2ef48481-f8343b91-80bf9960] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:40:50.079312Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:50.079347Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 grpc read failed 2026-01-07T17:40:50.079381Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 grpc closed 2026-01-07T17:40:50.079421Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 is DEAD 2026-01-07T17:40:50.080425Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7592676034662091816:2521] disconnected. 2026-01-07T17:40:50.080468Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7592676034662091816:2521] disconnected; active server actors: 1 2026-01-07T17:40:50.080488Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7592676034662091816:2521] client test-consumer disconnected session test-consumer_13_1_15496432466742017312_v1 2026-01-07T17:40:50.080585Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_15496432466742017312_v1 2026-01-07T17:40:50.080620Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7592676034662091819:2524] destroyed 2026-01-07T17:40:50.080680Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:399: Direct read cache: DestroyPartitionSession, sessionId: test-consumer_13_1_15496432466742017312_v1, proxy: [13:7592676034662091825:2526] 2026-01-07T17:40:50.080711Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_15496432466742017312_v1 2026-01-07T17:40:50.080739Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:489: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 got EvDirectReadDestroyPartitionSession, assignId: 1 2026-01-07T17:40:50.082475Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7592676034662091825:2526]: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 grpc read done: success# 0, data# { } 2026-01-07T17:40:50.082502Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7592676034662091825:2526]: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1grpc read failed 2026-01-07T17:40:50.082526Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7592676034662091825:2526]: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 grpc closed 2026-01-07T17:40:50.082548Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7592676034662091825:2526]: session cookie 2 consumer test-consumer session test-consumer_13_1_15496432466742017312_v1 proxy is DEAD 2026-01-07T17:40:50.086250Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.086282Z node 13 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.086297Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.086318Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.086334Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.120160Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.120200Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.120216Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.120234Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.120249Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.189071Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.189111Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.189126Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.189146Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.189161Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.220632Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.220671Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.220687Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.220706Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.220722Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.289987Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.290040Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.290057Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.290078Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T17:40:50.290095Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:50.321226Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:50.321270Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.321293Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:50.321316Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:50.321333Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless |88.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative >> TargetDiscoverer::Transfer >> TargetDiscoverer::IndexedTable >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> 
TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> TargetDiscoverer::RetryableError >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |88.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:52.889330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:52.889413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:52.889451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:52.889484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:52.889535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:52.889559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2026-01-07T17:37:52.889617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:52.889699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:52.890516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:52.890781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:53.014036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:53.014110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:53.014891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:53.026351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:53.027059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:53.027249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:53.037378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:53.037630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:53.038199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.038463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:53.042426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:53.042573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:53.043318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:53.043364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:53.043498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:53.043535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:53.043567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-07T17:37:53.043684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.049969Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:53.181911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:53.182156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.182353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:53.182413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:53.182630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:53.182691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:53.184842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.185034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:53.185234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.185326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:53.185363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:53.185395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:53.188452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.188515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:53.188550Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:53.193706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.193765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:53.193840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.193899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:53.196411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:53.197793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:53.197978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:53.198925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:53.199042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:53.199104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.199336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:53.199385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:53.199565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:53.199672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:53.201828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-07T17:40:54.603673Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-07T17:40:54.603708Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-07T17:40:54.603740Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-07T17:40:54.603778Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-07T17:40:54.603802Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-07T17:40:54.606924Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.607049Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.607087Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:54.607128Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-07T17:40:54.607173Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-07T17:40:54.608546Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.608647Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.608687Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:54.608728Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-07T17:40:54.608769Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-07T17:40:54.611125Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-01-07T17:40:54.611230Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.611268Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:54.611303Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-07T17:40:54.611340Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-07T17:40:54.611943Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.612059Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:40:54.612120Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:40:54.612157Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-07T17:40:54.612194Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 2 2026-01-07T17:40:54.612274Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-07T17:40:54.616061Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:54.616279Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:54.616400Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:40:54.618252Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-07T17:40:54.619833Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-01-07T17:40:54.619888Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-07T17:40:54.621788Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-07T17:40:54.621909Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.621954Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:2936:4924] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-07T17:40:54.623411Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-07T17:40:54.623466Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-07T17:40:54.623584Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-07T17:40:54.623617Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-07T17:40:54.623685Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-07T17:40:54.623713Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-07T17:40:54.623777Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-07T17:40:54.623806Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-07T17:40:54.623870Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-07T17:40:54.623901Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-07T17:40:54.625961Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-07T17:40:54.626143Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.626187Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:2939:4927] 2026-01-07T17:40:54.626618Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-07T17:40:54.626890Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.626932Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:2939:4927] 2026-01-07T17:40:54.627159Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-07T17:40:54.627242Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 
72057594046678944 2026-01-07T17:40:54.627319Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.627348Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:2939:4927] 2026-01-07T17:40:54.627427Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-07T17:40:54.627529Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.627558Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:2939:4927] 2026-01-07T17:40:54.627631Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-07T17:40:54.627660Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:2939:4927] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 |88.4%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TargetDiscoverer::SystemObjects |88.4%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TargetDiscoverer::InvalidCredentials >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 28087, MsgBus: 2615 2026-01-07T17:40:40.105782Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676000516815130:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:40.106230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:40.296787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:40.304264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:40.304354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:40.370187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:40.371367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:40.372335Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676000516815025:2081] 1767807640100694 != 1767807640100697 2026-01-07T17:40:40.473384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:40.473413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:40.473424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:40.473531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:40.537936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:40.851194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:41.110362Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:43.164181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676013401717845:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.164334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.164691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676013401717855:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.164763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.409993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:43.552331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:43.577425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:43.605010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:43.641621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676013401718159:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.641695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.641760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676013401718164:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.641931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676013401718166:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.641979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:43.645549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:43.655984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676013401718167:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-07T17:40:43.780521Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676013401718219:2765] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15745, MsgBus: 21257 2026-01-07T17:40:44.747233Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676017591608398:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:44.747294Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:44.761028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:44.846122Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:44.848265Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676017591608371:2081] 1767807644746142 != 1767807644746145 2026-01-07T17:40:44.862195Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:44.862274Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:44.882101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:44.943448Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:44.943471Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:44.943479Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:44.943586Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:44.975432Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:45.352713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:45.360606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:40:45.389132Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:40:45.417859Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:40:45.752673Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:48.207228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676034771478482:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:48.207335Z node 2 ... CE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592676055337849219:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.669045Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.672996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:53.684761Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592676055337849221:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:40:53.751366Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592676055337849272:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:54.607035Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592676038157977309:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:54.607157Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:55.248234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:40:55.281118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:40:55.326049Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 8859, MsgBus: 16298 2026-01-07T17:40:56.325499Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676070117507139:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:56.325580Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:56.339198Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:56.413067Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:56.413150Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:56.425384Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592676070117507111:2081] 1767807656324526 != 1767807656324529 2026-01-07T17:40:56.439365Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:56.445113Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:40:56.493001Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:56.493025Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:56.493031Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:56.493099Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:56.511416Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:56.971988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:57.332629Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:57.333659Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:59.511164Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676083002410004:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.511164Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676083002409992:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.511222Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676083002410006:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.511242Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.511492Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676083002410014:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.511538Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:59.514022Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:59.516334Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676083002410016:2573] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:40:59.523404Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676083002410012:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:40:59.523416Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676083002410013:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:40:59.578845Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676083002410063:2604] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:59.597968Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676083002410081:2612] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:00.066113Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:41:00.452877Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:00.558527Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:41:01.328885Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676070117507139:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:01.328995Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TargetDiscoverer::Basic >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser >> TargetDiscoverer::Dirs |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |88.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> TargetDiscoverer::Negative [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TargetDiscoverer::Transfer [GOOD] >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats >> 
XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers >> GroupWriteTest::Simple [GOOD] >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] >> TargetDiscoverer::IndexedTable [GOOD] >> AnalyzeColumnshard::AnalyzeServerless |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |88.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] |88.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TargetDiscoverer::RetryableError [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |88.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> AnalyzeColumnshard::Analyze ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2026-01-07T17:41:01.919815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676089798389154:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:01.919874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:02.329032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:02.437059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676089798389002:2081] 1767807661910781 != 1767807661910784 2026-01-07T17:41:02.457630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:02.463535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:02.463625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:02.468913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:02.540943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:02.833757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:02.833786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:02.833794Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:02.833870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:02.924815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:03.128974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:03.145423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:03.205943Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-07T17:41:03.205995Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 8152420317915576012 2026-01-07T17:40:50.538157Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:50.562112Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:50.562183Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:50.564633Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:50.579414Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:50.582039Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:41:06.034021Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:41:06.034121Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:41:06.078816Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |88.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |88.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2026-01-07T17:41:01.908778Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676090808137346:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:01.909592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:01.963695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:02.347980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:02.358612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:02.358735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:02.459155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:02.474620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:02.548297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:02.813666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:02.813729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:02.813737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:02.813846Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:02.921806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:03.061150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:03.070367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:41:03.245560Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1767807663216, tx_id: 281474976715658 } } } 2026-01-07T17:41:03.245605Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2026-01-07T17:41:03.306173Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:168: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:03.306204Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:183: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 
2026-01-07T17:41:03.306240Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:193: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2026-01-07T17:41:01.903022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676093214972918:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:01.903085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:02.325349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:02.325501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:02.363691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:02.376165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:02.445771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:02.449204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676093214972893:2081] 1767807661899566 != 1767807661899569 2026-01-07T17:41:02.512059Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.015020s 2026-01-07T17:41:02.591525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:02.764536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:02.764560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:02.764566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:02.764647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:02.926932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:03.083846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:41:03.091644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:41:03.095560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:03.655165Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807663132, tx_id: 1 } } } 2026-01-07T17:41:03.655194Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-07T17:41:03.681145Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807662439, tx_id: 281474976710657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807663545, tx_id: 281474976715658 } }] } } 2026-01-07T17:41:03.681201Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-07T17:41:05.552454Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807663545, tx_id: 281474976715658 } } } 2026-01-07T17:41:05.552482Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-07T17:41:05.552535Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2026-01-07T17:41:05.552627Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:142: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> TraverseColumnShard::TraverseColumnTable >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query [GOOD] |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError [GOOD] Test command err: 2026-01-07T17:41:03.402621Z 
node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676100384648301:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:03.402789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:03.432559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:03.693057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:03.710735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:03.710855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:03.771931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:03.812212Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:03.815544Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676100384648274:2081] 1767807663396403 != 1767807663396406 2026-01-07T17:41:03.908782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:04.420634Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:04.445182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:04.445202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:04.445208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:04.448628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:04.702031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:04.709159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:04.710084Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-0 } } } 2026-01-07T17:41:04.710127Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-0 }, iteration# 0 2026-01-07T17:41:04.710197Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-1 } } } 2026-01-07T17:41:04.710209Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-1 }, iteration# 1 2026-01-07T17:41:04.710235Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-2 } } } 2026-01-07T17:41:04.710264Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-2 }, iteration# 2 2026-01-07T17:41:04.710297Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-3 } } } 2026-01-07T17:41:04.710306Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-3 }, iteration# 3 2026-01-07T17:41:04.710347Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-4 } } } 2026-01-07T17:41:04.710366Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-4 }, iteration# 4 2026-01-07T17:41:04.710399Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-5 } } } 2026-01-07T17:41:04.710412Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-5 }, iteration# 5 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TargetDiscoverer::SystemObjects [GOOD] |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> TargetDiscoverer::Basic [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2026-01-07T17:41:04.497750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676104276684969:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:04.500081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:04.825014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:04.920591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676104276684932:2081] 1767807664485216 != 1767807664485219 2026-01-07T17:41:04.929998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:04.930137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:04.935252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:04.938973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:05.033771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:05.383477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:05.383503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:05.383511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-07T17:41:05.383601Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:05.521098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:05.703614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:05.716726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:05.976994Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user } } } 2026-01-07T17:41:05.977044Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user }, iteration# 0 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TargetDiscoverer::Dirs [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2026-01-07T17:41:04.497769Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676104667662049:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:04.497979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:04.792236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:04.803276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:04.803383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:04.818024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:04.920660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:05.015809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:05.417662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:05.417684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:05.417795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:05.417902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:05.497180Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:05.717121Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:05.724318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:05.731665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:05.929399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:41:05.948738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.071577Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807665764, tx_id: 1 } } } 2026-01-07T17:41:06.071622Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-07T17:41:06.089842Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807664938, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807665876, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807665960, tx_id: 281474976710659 } }] } } 2026-01-07T17:41:06.089872Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-07T17:41:08.112995Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807665876, tx_id: 281474976710658 } } } 2026-01-07T17:41:08.113032Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-07T17:41:08.113084Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TraverseDatashard::TraverseOneTable >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> 
ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2026-01-07T17:41:04.982682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676103973057817:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:04.984336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:05.245723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:05.331221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:05.332238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:05.332327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:05.336137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:05.533011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:05.591515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:05.591543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:05.591550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:05.591649Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:05.847518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:05.857429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:05.989320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:06.029096Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: 
Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807665897, tx_id: 1 } } } 2026-01-07T17:41:06.029131Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-07T17:41:06.041010Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807665379, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807665967, tx_id: 281474976710658 } }] } } 2026-01-07T17:41:06.041041Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-07T17:41:08.580662Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807665967, tx_id: 281474976710658 } } } 2026-01-07T17:41:08.580688Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-07T17:41:08.580751Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] >> TraverseDatashard::TraverseTwoTables |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |88.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:243:2156] sender: [1:244:2060] recipient: [1:226:2145] 2026-01-07T17:40:17.698281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:40:17.698381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:17.698420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:40:17.698460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:40:17.698496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:40:17.698524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:40:17.698626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:17.698717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:40:17.699588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:40:17.700015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:40:17.775995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:40:17.776057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:17.793063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:40:17.793477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:40:17.793670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:40:17.808035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:40:17.808336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:40:17.809039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:40:17.809784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:40:17.812141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:17.812342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:40:17.813512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:40:17.813571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:17.813739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:40:17.813784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:40:17.813826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:40:17.813946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:40:17.950891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.954982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:17.955626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rts: 2/3 2026-01-07T17:41:09.343430Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:2 progress is 2/3 2026-01-07T17:41:09.343467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2026-01-07T17:41:09.343510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-07T17:41:09.343836Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:41:09.343877Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:41:09.343923Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-07T17:41:09.344003Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1240:3020] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2026-01-07T17:41:09.344188Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [7:244:2156], Recipient [7:1240:3020]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2026-01-07T17:41:09.344227Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-07T17:41:09.344260Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 104 datashard 72075186233409549 state Ready 2026-01-07T17:41:09.344310Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409549 Got TEvSchemaChangedResult from SS at 72075186233409549 2026-01-07T17:41:09.344505Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:41:09.344544Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:41:09.344612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:41:09.344660Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:41:09.344732Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:41:09.344765Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:41:09.344796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:41:09.344835Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:41:09.344865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready 
parts: 3/3 2026-01-07T17:41:09.344901Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-07T17:41:09.344997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:992:2809] message: TxId: 104 2026-01-07T17:41:09.345075Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:41:09.345134Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:41:09.345177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:41:09.345374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-07T17:41:09.345423Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-07T17:41:09.345450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-07T17:41:09.345489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-07T17:41:09.345515Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-07T17:41:09.345539Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-07T17:41:09.345608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-07T17:41:09.352081Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:41:09.352263Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:41:09.352380Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:992:2809] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2026-01-07T17:41:09.352563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:41:09.352636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1293:3059] 2026-01-07T17:41:09.352882Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1295:3061], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:09.352944Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:09.352978Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-07T17:41:09.361662Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:966:2104], Recipient [7:244:2156] 2026-01-07T17:41:09.361758Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:41:09.365220Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:09.365785Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:41:09.365847Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:41:09.366107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:41:09.369970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:09.370288Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2026-01-07T17:41:09.370361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:41:09.370821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:41:09.370872Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:41:09.371356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1369:3135], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:09.371411Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:09.371453Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:09.371612Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:992:2809], Recipient [7:244:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2026-01-07T17:41:09.371655Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:41:09.371749Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:41:09.371863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:41:09.371904Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1367:3133] 2026-01-07T17:41:09.372159Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1369:3135], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:09.372203Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:09.372249Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2026-01-07T17:41:05.583589Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676107310286317:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:05.583639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:05.845009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:05.857476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:05.857650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:05.919901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:05.978825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:05.982377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676107310286103:2081] 1767807665558555 != 1767807665558558 2026-01-07T17:41:06.035725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:06.297329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:06.297351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:06.297363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:06.297473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:06.589639Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:06.713577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:06.720912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:06.740091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:41:06.765686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:07.062549Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807666765, tx_id: 1 } } } 2026-01-07T17:41:07.062612Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-07T17:41:07.084157Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807665981, tx_id: 281474976715657 } }, { name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767807666779, tx_id: 281474976710658 } }] } } 2026-01-07T17:41:07.084183Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-07T17:41:07.102854Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807666933, tx_id: 281474976710659 } }] } } 2026-01-07T17:41:07.102879Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] 
Listing succeeded: path# /Root/Dir 2026-01-07T17:41:09.293964Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807666933, tx_id: 281474976710659 } } } 2026-01-07T17:41:09.293994Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2026-01-07T17:41:09.294043Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D4F36CABD00 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:06.198373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:06.198475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:06.198516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:06.198582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:06.198633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:06.198682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:06.198763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:06.198856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:06.199709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:06.200013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2026-01-07T17:41:06.286359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:06.286425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:06.300402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:06.300795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:06.301002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:06.308804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:06.309187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:06.309909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:06.310197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:06.313320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:06.313525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:06.314687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:06.314763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:06.314883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:06.314931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:06.315029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:06.315228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:06.439577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.442622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:41:06.442768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.442823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.442875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.442922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.442960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:06.443364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 Leader for TabletID 72057594046678944 is [1:898:2846] sender: [1:971:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 2026-01-07T17:41:10.830314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 2026-01-07T17:41:10.841547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 0 2026-01-07T17:41:10.841668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-07T17:41:10.841738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:41:10.841933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:41:10.841998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:41:10.842030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:41:10.842107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D4F36D5C100, stats written 2 2026-01-07T17:41:10.842667Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:10.850217Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 224us result status StatusSuccess 2026-01-07T17:41:10.850681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.5%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysViewTest::ReadOnlyMode |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 5961, MsgBus: 29486 2026-01-07T17:40:31.425276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675964630609029:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:31.425346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:31.848565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:31.856375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:31.856479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:31.946774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:40:31.951168Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675964630609001:2081] 1767807631423651 != 1767807631423654 2026-01-07T17:40:31.972884Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:32.107662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:32.107708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:32.107735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:32.107836Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:32.137016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:32.460795Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:32.523708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:32.583300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:32.741600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:32.972262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:33.072283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:34.972291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675977515512772:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:34.972416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:34.972777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675977515512782:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:34.972841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.332047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.359286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.394376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.428623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.458588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.496882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.537508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.578332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:35.661293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675981810480952:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.661386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675981810480957:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.661387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.661523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675981810480959:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.661555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:35.664905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:35.674846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675981810480960:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:40:35.748617Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675981810481012:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:36.425733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675964630609029:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:36.425837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:37.129192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ileState: Unknown -> Disconnected 2026-01-07T17:41:01.599675Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:01.604863Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:01.668166Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:01.668191Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:01.668200Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:01.668283Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:01.741874Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:02.246330Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:02.254744Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:02.275639Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:02.357595Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:02.361336Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:02.537266Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:02.640711Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:05.990919Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676106854117712:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:05.991028Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:05.991655Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676106854117722:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:05.991723Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.082731Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.128564Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.182038Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.233376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.277245Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.389236Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.391448Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676089674246719:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:06.391511Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:06.448558Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.515103Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:06.619154Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676111149085892:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.619318Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.619575Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676111149085898:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.619628Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676111149085897:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.619642Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:06.623662Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:06.636877Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676111149085901:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:41:06.692392Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676111149085952:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:09.161114Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:41:09.169674Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-01-07T17:41:09.214665Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:41:09.359464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> StreamCreator::WithResolvedTimestamps >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> TExtSubDomainTest::GenericCases >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:13.063212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:13.063318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:13.063367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:13.063402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:13.063437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:13.063465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:13.063516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:13.063585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:13.064389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:13.064670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:13.146836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:13.146891Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:13.160819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:13.161191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:13.161406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:13.167400Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:13.167691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:13.168373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:13.168629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:13.170982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:13.171168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:13.172301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:13.172353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:13.172463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:13.172509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:13.172595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:13.172767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:13.311620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.312921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: 
ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:13.313578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:41:14.566927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-07T17:41:14.567172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-07T17:41:14.568045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:14.568196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:14.568252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-07T17:41:14.568429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:41:14.568649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-07T17:41:14.568755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:41:14.572947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:14.573021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:41:14.573390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:41:14.573550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:14.573602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:818:2775], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-07T17:41:14.573655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:818:2775], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:41:14.574058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-07T17:41:14.574122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:41:14.574241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:14.574279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:14.574324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:14.574378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:14.574423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:41:14.574467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:14.574514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:41:14.574548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:41:14.574649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:41:14.574721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:41:14.574764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2026-01-07T17:41:14.574797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:41:14.575704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:14.575834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:14.575901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:14.575948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-07T17:41:14.576009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-07T17:41:14.577790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, 
cookie: 102 2026-01-07T17:41:14.577915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:14.577960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:14.577993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:41:14.578030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:41:14.578123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:41:14.583644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:14.584190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:41:14.584494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:41:14.584548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:41:14.585053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:41:14.585195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:41:14.585246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:849:2804] TestWaitNotification: OK eventTxId 102 2026-01-07T17:41:14.585805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:14.586061Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 296us result status StatusSuccess 2026-01-07T17:41:14.586508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:14.324317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:14.324447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:14.324489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:14.324529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:14.324570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:14.324600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:14.324651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:14.324716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:14.325608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:14.325940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:14.458885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:14.458950Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:14.488064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:14.488434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:14.488622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:14.511547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:14.511844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:14.512480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:14.512795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:14.516053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:14.516278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:14.517554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:14.517617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:14.517730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:14.517783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:14.517879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:14.518171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:14.751661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.752712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.752844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.752956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:14.753838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ished: true CreateTxId: 281474976710675 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000018 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710692 CreateStep: 5000016 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 25 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710680 CreateStep: 5000030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:15.515209Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:41:15.519785Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-07T17:41:15.520654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:15.521003Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 319us result status StatusSuccess 2026-01-07T17:41:15.521343Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:15.521825Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:15.522017Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 192us result status StatusSuccess 2026-01-07T17:41:15.522297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:15.522780Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:15.522979Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 208us result status StatusSuccess 2026-01-07T17:41:15.523259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 34 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710689 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 34 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets >> TOlap::StoreStatsQuota [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] Test command err: 2026-01-07T17:37:18.892463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675134956887434:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:18.892851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:18.931879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:37:18.932120Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:19.377004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:19.402145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:19.402244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:19.536688Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:19.545682Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675134956887300:2081] 1767807438885066 != 1767807438885069 2026-01-07T17:37:19.550541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:19.673049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:19.845461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003c9b/r3tmp/yandexJasSin.tmp 2026-01-07T17:37:19.845496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003c9b/r3tmp/yandexJasSin.tmp 2026-01-07T17:37:19.845689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003c9b/r3tmp/yandexJasSin.tmp 2026-01-07T17:37:19.845772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:19.916214Z INFO: TTestServer started on Port 16815 GrpcPort 14022 2026-01-07T17:37:19.913457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:14022 2026-01-07T17:37:20.273146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:20.285803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:37:20.306017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:37:20.443446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:20.646385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:37:20.660501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-07T17:37:22.990930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7592675152136757524:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.991057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.993783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675152136757537:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.993897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675152136757538:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.994037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.998031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:23.016431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675152136757541:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:37:23.271322Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675156431724901:2648] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:23.300547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.373491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.457895Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675156431724909:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:23.458362Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OTU0ODg2NjAtZDBmNWYyMmUtNTNkODk1ZDUtMTA4MGE2NTI=, ActorId: [1:7592675152136757522:2331], ActorState: ExecuteState, LegacyTraceId: 01kecrhr0z736va6pkbyzzhdh8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:23.461691Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:23.519377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-07T17:37:23.893160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675134956887434:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:23.899710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7592675156431725197:2829] === CheckClustersList. Ok 2026-01-07T17:37:28.987921Z :WriteToTopic_Demo_23_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:29.004488Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:29.022675Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675182201529168:2924] connected; active server actors: 1 2026-01-07T17:37:29.022871Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-07T17:37:29.043477Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:29.043621Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: ... tx events 2026-01-07T17:41:13.729701Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 100000 offset 79 count 1 size 1000243 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.729773Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:41:13.729834Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:41:13.729874Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::ReplyWrite. Partition: {0, {13, 281474976710674}, 100000} 2026-01-07T17:41:13.729933Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976710674}, 100000}, SeqNo: 80, partNo: 0, Offset: 79 is stored on disk 2026-01-07T17:41:13.729967Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::ReplyWrite. Partition: {0, {13, 281474976710674}, 100000} 2026-01-07T17:41:13.730000Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976710674}, 100000}, SeqNo: 80, partNo: 1, Offset: 79 is stored on disk 2026-01-07T17:41:13.730271Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037894' partition 100000 offset 79 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-07T17:41:13.734492Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:41:13.734535Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.734550Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.734570Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.734584Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Try persist 2026-01-07T17:41:13.734615Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:41:13.734696Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic_A' partition: 0 messageNo: 159 requestId: cookie: 80 2026-01-07T17:41:13.735720Z node 13 :PERSQUEUE DEBUG: partition.cpp:4395: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-07T17:41:13.736004Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:41:13.736035Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-07T17:41:13.742902Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 63 count 8 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.742935Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 71 count 5 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.742952Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 76 count 1 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.742965Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 77 count 1 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.742981Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 78 count 1 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.742993Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 100000 offset 79 count 1 actorID [13:7592676122208441328:2469] 2026-01-07T17:41:13.743073Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:41:13.743238Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:41:13.743264Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:41:13.743284Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.743298Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.743317Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.743332Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Try persist 2026-01-07T17:41:13.743355Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:41:13.743734Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 63 partno 0 count 8 parts 8 suffix '0' size 8001772 2026-01-07T17:41:13.743772Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 71 partno 0 count 5 parts 5 suffix '124' size 5001099 2026-01-07T17:41:13.743802Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 76 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-07T17:41:13.743831Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 77 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-07T17:41:13.743860Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 78 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-07T17:41:13.743915Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037894' partition 100000 offset 79 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-07T17:41:13.766799Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72075186224037894] DeletePartition {0, {13, 281474976710674}, 100000} 2026-01-07T17:41:13.766885Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:41:13.770153Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:41:13.770375Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:13.770417Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.770441Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.770485Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.770513Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:13.823505Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:13.823546Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.823560Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.823583Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.823596Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:41:13.873045Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:13.873086Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.873099Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.873120Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.873132Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:13.895340Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:41:13.923808Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:13.923855Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.923870Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.923903Z node 13 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.923916Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:41:13.977098Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:13.977155Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.977171Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:13.977198Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:13.977212Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin >> StreamCreator::WithResolvedTimestamps [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> TNetClassifierTest::TestInitFromRemoteSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] Test command err: 2026-01-07T17:41:14.437622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676148216992111:2184];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:14.437767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:14.511449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:14.997037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.016259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.016377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.021880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.117045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.118570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7592676148216991965:2081] 1767807674411193 != 1767807674411196 2026-01-07T17:41:15.181462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.329162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676148216992228:2106] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:15.329218Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676152511959830:2268] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:15.329332Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676148216992254:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:15.329435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676148216992254:2119], cookie# 1 2026-01-07T17:41:15.331121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992457:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992454:2206], cookie# 1 2026-01-07T17:41:15.331186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992458:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992455:2206], cookie# 1 2026-01-07T17:41:15.331223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992459:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992456:2206], cookie# 1 2026-01-07T17:41:15.331268Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991933:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992457:2206], cookie# 1 2026-01-07T17:41:15.331303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991939:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992459:2206], cookie# 1 2026-01-07T17:41:15.331343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991936:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992458:2206], cookie# 1 2026-01-07T17:41:15.331377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992457:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991933:2049], cookie# 1 2026-01-07T17:41:15.331435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992459:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991939:2055], cookie# 1 2026-01-07T17:41:15.331456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992458:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991936:2052], cookie# 1 
2026-01-07T17:41:15.331530Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992454:2206], cookie# 1 2026-01-07T17:41:15.331561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676148216992413:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:15.331580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992456:2206], cookie# 1 2026-01-07T17:41:15.331606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676148216992413:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:15.331648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992455:2206], cookie# 1 2026-01-07T17:41:15.331660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676148216992413:2206][/dc-1] Sync cookie mismatch: sender# [1:7592676148216992455:2206], cookie# 1, current cookie# 0 2026-01-07T17:41:15.331738Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676148216992254:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:15.346493Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676148216992254:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676148216992413:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:15.346675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676148216992254:2119], cacheItem# { Subscriber: { Subscriber: [1:7592676148216992413:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:15.350322Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676152511959831:2269], recipient# [1:7592676152511959830:2268], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { 
DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:15.350398Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676152511959830:2268] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:15.409986Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676152511959830:2268] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:15.420874Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676152511959830:2268] Handle TEvDescribeSchemeResult Forward to# [1:7592676152511959829:2267] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:15.429234Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676148216992228:2106] Handle TEvProposeTransaction 2026-01-07T17:41:15.429264Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676148216992228:2106] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:41:15.429321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676148216992228:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592676152511959836:2273] 2026-01-07T17:41:15.437128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected eve ... 
fy# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-07T17:41:15.658595Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676148216992254:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676152511959887:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807675683 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:15.658696Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676148216992254:2119], cacheItem# { Subscriber: { Subscriber: [1:7592676152511959887:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807675683 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:15.658900Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676152511959894:2311], recipient# [1:7592676152511959886:2309], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:15.658949Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7592676152511959886:2309] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-07T17:41:15.681525Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676148216992228:2106] Handle TEvNavigate describe path /dc-1 2026-01-07T17:41:15.681576Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676152511959896:2313] HANDLE EvNavigateScheme /dc-1 2026-01-07T17:41:15.681675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676148216992254:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:15.681755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676148216992254:2119], cookie# 4 2026-01-07T17:41:15.681812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992457:2206][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992454:2206], cookie# 4 2026-01-07T17:41:15.681827Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992458:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992455:2206], cookie# 4 2026-01-07T17:41:15.681859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676148216992459:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992456:2206], cookie# 4 2026-01-07T17:41:15.681888Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991939:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992459:2206], cookie# 4 2026-01-07T17:41:15.681960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992459:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991939:2055], cookie# 4 2026-01-07T17:41:15.681985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992456:2206], cookie# 4 2026-01-07T17:41:15.682003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676148216992413:2206][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:15.682021Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991933:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992457:2206], cookie# 4 2026-01-07T17:41:15.682040Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676148216991936:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676148216992458:2206], cookie# 4 2026-01-07T17:41:15.682065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992457:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991933:2049], cookie# 4 2026-01-07T17:41:15.682089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676148216992458:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216991936:2052], cookie# 4 2026-01-07T17:41:15.682110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992454:2206], cookie# 4 2026-01-07T17:41:15.682129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676148216992413:2206][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:15.682169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676148216992413:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676148216992455:2206], cookie# 4 2026-01-07T17:41:15.682194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676148216992413:2206][/dc-1] Sync cookie mismatch: sender# 
[1:7592676148216992455:2206], cookie# 4, current cookie# 0 2026-01-07T17:41:15.682249Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676148216992254:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:15.682309Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676148216992254:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676148216992413:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807675641 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:15.682386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676148216992254:2119], cacheItem# { Subscriber: { Subscriber: [1:7592676148216992413:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807675641 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-07T17:41:15.682506Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676152511959897:2314], recipient# [1:7592676152511959896:2313], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:15.682532Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676152511959896:2313] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:15.682580Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676152511959896:2313] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:15.683573Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676152511959896:2313] Handle TEvDescribeSchemeResult Forward to# [1:7592676152511959895:2312] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767807675641 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] |88.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2026-01-07T17:41:13.484725Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676141637212193:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:13.484795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:13.548468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:14.011946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:14.012093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:14.025648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:14.033091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:14.089072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676141637212153:2081] 1767807673419807 != 1767807673419810 
2026-01-07T17:41:14.104466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:14.193088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:14.585414Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:14.624196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:14.624219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:14.624228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:14.624324Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:15.135382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:15.145816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:15.153712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:15.274974Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:41:15.275005Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:41:15.275686Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:41:18.223725Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807675263, tx_id: 281474976710658 } } } 2026-01-07T17:41:18.224357Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:41:18.226185Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:18.227970Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:41:18.227985Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:41:18.261366Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:41:18.261396Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:41:18.262169Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-07T17:41:18.487226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676141637212193:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:18.487277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:18.542493Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:18.542513Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-07T17:41:18.560254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:41:18.589118Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:18.589146Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> BasicUsage::WriteSessionSwitchDatabases [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] >> 
TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] >> TExtSubDomainTest::GenericCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2026-01-07T17:40:15.014775Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675894379644980:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:15.014823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:15.242996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:15.272726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:15.272827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:15.314474Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675890084677648:2081] 1767807615013509 != 1767807615013512 2026-01-07T17:40:15.328100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:15.337630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:15.438509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:15.438534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:15.438542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:15.438685Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:15.459874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:15.677625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:16.026311Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:18.201216Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:40:18.201378Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:40:18.201426Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:40:18.201593Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675907264547692:2320], Start check tables existence, number paths: 2 2026-01-07T17:40:18.206807Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675907264547692:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:40:18.206873Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675907264547692:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:40:18.206904Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675907264547692:2320], Successfully finished 2026-01-07T17:40:18.206961Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:40:18.207098Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YjU4MjYzMDQtYmQ3NzZiMmUtYWUxMjFkZDctNDUwNmY4YTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YjU4MjYzMDQtYmQ3NzZiMmUtYWUxMjFkZDctNDUwNmY4YTc= temp_dir_name# b9fc85fe-456e-c2c7-6731-e296fd55a98b trace_id# 2026-01-07T17:40:18.207661Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YjU4MjYzMDQtYmQ3NzZiMmUtYWUxMjFkZDctNDUwNmY4YTc=, ActorId: [1:7592675907264547712:2325], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:18.209471Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:40:18.221287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:18.225087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:18.228241Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:40:18.230702Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] 
[TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:40:18.235931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:40:18.295897Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:18.342812Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675907264547773:2530] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:18.342976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675907264547722:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:40:18.352895Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE= temp_dir_name# ecd57a59-4473-3bcc-b15f-47bf1ea91edd trace_id# 2026-01-07T17:40:18.353049Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE=, ActorId: [1:7592675907264547780:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:18.353394Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:40:18.353422Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:40:18.353508Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907264547782:2327], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:40:18.353623Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:40:18.353751Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE=, ActorId: [1:7592675907264547780:2326], ActorState: ReadyState, LegacyTraceId: 01kecrq39h55qmcnc2y742yapn, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7592675907264547779:2535] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-07T17:40:18.353868Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7592675907264547780:2326], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE= 2026-01-07T17:40:18.353926Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7592675907264547787:2328], Database: /Root, Start database fetching 2026-01-07T17:40:18.354897Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7592675907264547787:2328], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-07T17:40:18.355068Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907264547782:2327], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:40:18.355118Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-07T17:40:18.355161Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-07T17:40:18.355174Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-07T17:40:18.355421Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7592675907264547793:2329], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YTBiY2IwNTktYjEyMTg3ZDktZGU1YWE0YjktYjlhNmMyYjE=, Start pool fetching 2026-01-07T17:40:18.355452Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7592675907264547794:2330], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-07T17:40:18.355465Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907264547795:2331], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:40:18.356611Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7592675907264547794:2330], DatabaseId: /Root, PoolId: sample_pool_id, Got w ... 
is_rollback# false trace_id# 2026-01-07T17:41:18.034820Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Forwarded TEvStreamData to [10:7592676161852643472:3825] trace_id# 2026-01-07T17:41:18.039753Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:41:18.039957Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, TxInfo status# Committed kind# ReadOnly total_duration# 34.706 server_duration# 34.604 queries_count# 2 trace_id# 2026-01-07T17:41:18.040035Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:18.040561Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:18.040608Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, EndCleanup is_final# true trace_id# 2026-01-07T17:41:18.040678Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: ExecuteState, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Sent query response back to proxy proxy_request_id# 5 proxy_id# [12:7592676125268039736:2266] trace_id# 2026-01-07T17:41:18.040703Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: unknown state, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:18.041307Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=ZDAxZmM5YTUtODU4NTNiYWYtNjc1ZmI4ZS0yZDQxMDUzMw==, ActorId: [12:7592676159627779250:2398], ActorState: unknown state, LegacyTraceId: 01kecrrx1qcpvqm7gt3rj60cad, Session actor destroyed trace_id# 2026-01-07T17:41:18.052874Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, 
ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU= temp_dir_name# 3cedec11-46c5-80bb-4053-c2b2314a04e4 trace_id# 2026-01-07T17:41:18.053285Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:18.053776Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ReadyState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpc_actor# [10:7592676166147610781:3836] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-07T17:41:18.053828Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ReadyState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Request placed into pool from cache pool_id# default trace_id# 2026-01-07T17:41:18.053910Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Sending CompileQuery request trace_id# 2026-01-07T17:41:18.305438Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136865685928472 trace_id# 2026-01-07T17:41:18.305518Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:41:18.305696Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Created new KQP executer executer_id# [12:7592676163922746585:2408] is_rollback# false trace_id# 2026-01-07T17:41:18.323009Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Forwarded TEvStreamData to [10:7592676166147610781:3836] trace_id# 2026-01-07T17:41:18.325332Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, 
TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:41:18.325509Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, TxInfo status# Committed kind# ReadOnly total_duration# 20.218 server_duration# 20.109 queries_count# 2 trace_id# 2026-01-07T17:41:18.325600Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:18.326064Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:18.326121Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, EndCleanup is_final# true trace_id# 2026-01-07T17:41:18.326191Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: ExecuteState, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Sent query response back to proxy proxy_request_id# 6 proxy_id# [12:7592676125268039736:2266] trace_id# 2026-01-07T17:41:18.326226Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: unknown state, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:18.326867Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=ZGQzMDBmNWYtM2U3MjQ4OWEtODUxZjhhMTItNDZlNDdhMmU=, ActorId: [12:7592676163922746577:2408], ActorState: unknown state, LegacyTraceId: 01kecrrxk55cbvnpmfdqqhx7jb, Session actor destroyed trace_id# 2026-01-07T17:41:18.381369Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-07T17:41:18.381837Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:41:18.381995Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-07T17:41:18.382169Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:41:18.398471Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=MjdiMmVmYmItYjc1YTA5NGEtNWQ5MjdlZTktNGFhMTU4ZTk=, ActorId: [10:7592676127492903864:2333], ActorState: ReadyState, Session closed due 
to explicit close event trace_id# 2026-01-07T17:41:18.398533Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=MjdiMmVmYmItYjc1YTA5NGEtNWQ5MjdlZTktNGFhMTU4ZTk=, ActorId: [10:7592676127492903864:2333], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:18.398573Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=MjdiMmVmYmItYjc1YTA5NGEtNWQ5MjdlZTktNGFhMTU4ZTk=, ActorId: [10:7592676127492903864:2333], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:18.398613Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=MjdiMmVmYmItYjc1YTA5NGEtNWQ5MjdlZTktNGFhMTU4ZTk=, ActorId: [10:7592676127492903864:2333], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:18.398713Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=MjdiMmVmYmItYjc1YTA5NGEtNWQ5MjdlZTktNGFhMTU4ZTk=, ActorId: [10:7592676127492903864:2333], ActorState: unknown state, Session actor destroyed trace_id# |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-07T17:41:15.937319Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676152299240655:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:15.937502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:16.019560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:16.545097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:16.595120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:16.595196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:16.778244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:16.832361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:16.837081Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676152299240432:2081] 1767807675880834 != 1767807675880837 2026-01-07T17:41:16.837144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:16.937192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:17.523981Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676152299240694:2105] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:17.524022Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676160889175802:2467] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:17.524118Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676156594208016:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:17.524194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676156594208016:2118], cookie# 1 2026-01-07T17:41:17.525660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676156594208215:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208212:2202], cookie# 1 2026-01-07T17:41:17.525694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676156594208216:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208213:2202], cookie# 1 2026-01-07T17:41:17.525708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676156594208217:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208214:2202], cookie# 1 2026-01-07T17:41:17.525760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240400:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208215:2202], cookie# 1 2026-01-07T17:41:17.525794Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240403:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208216:2202], cookie# 1 2026-01-07T17:41:17.525815Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240406:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208217:2202], cookie# 1 2026-01-07T17:41:17.525872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208215:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240400:2049], cookie# 1 2026-01-07T17:41:17.525902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208216:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240403:2052], cookie# 1 2026-01-07T17:41:17.525927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208217:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240406:2055], cookie# 1 2026-01-07T17:41:17.525968Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208212:2202], cookie# 1 2026-01-07T17:41:17.525993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676156594208177:2202][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:17.526034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208213:2202], cookie# 1 2026-01-07T17:41:17.526058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676156594208177:2202][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:17.526084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208214:2202], cookie# 1 2026-01-07T17:41:17.526106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676156594208177:2202][/dc-1] Sync cookie mismatch: sender# [1:7592676156594208214:2202], cookie# 1, current cookie# 0 2026-01-07T17:41:17.526155Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676156594208016:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:17.526230Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676156594208016:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676156594208177:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:17.526304Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676156594208016:2118], cacheItem# { Subscriber: { Subscriber: [1:7592676156594208177:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:17.565707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676160889175803:2468], recipient# [1:7592676160889175802:2467], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:17.565798Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676160889175802:2467] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:17.777482Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676160889175802:2467] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:17.780154Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676160889175802:2467] Handle TEvDescribeSchemeResult Forward to# [1:7592676160889175801:2466] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:17.790260Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676152299240694:2105] Handle TEvProposeTransaction 2026-01-07T17:41:17.790284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676152299240694:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:41:17.790331Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676152299240694:2105] Cookie# 0 userReqId# "" t ... 
d: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807678210 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:18.184716Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676165184143149:2505], recipient# [1:7592676165184143141:2503], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:38:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:18.184744Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7592676165184143141:2503] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-07T17:41:18.187610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240400:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [1:7592676165184143146:2504], cookie# 1 2026-01-07T17:41:18.187680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676165184143146:2504][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240400:2049], cookie# 1 2026-01-07T17:41:18.187712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676165184143142:2504][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7592676165184143143:2504], cookie# 1 2026-01-07T17:41:18.187727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676165184143142:2504][/dc-1/USER_0] Sync cookie mismatch: sender# [1:7592676165184143143:2504], cookie# 1, current cookie# 0 2026-01-07T17:41:18.225361Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676152299240694:2105] Handle TEvNavigate describe path /dc-1 2026-01-07T17:41:18.225410Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676165184143151:2507] HANDLE EvNavigateScheme /dc-1 2026-01-07T17:41:18.225497Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676156594208016:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:18.225563Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676156594208016:2118], cookie# 4 2026-01-07T17:41:18.225615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7592676156594208215:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208212:2202], cookie# 4 2026-01-07T17:41:18.225630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676156594208216:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208213:2202], cookie# 4 2026-01-07T17:41:18.225643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676156594208217:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208214:2202], cookie# 4 2026-01-07T17:41:18.225663Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240403:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208216:2202], cookie# 4 2026-01-07T17:41:18.225701Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240406:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208217:2202], cookie# 4 2026-01-07T17:41:18.225803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208216:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240403:2052], cookie# 4 2026-01-07T17:41:18.225825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208217:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240406:2055], cookie# 4 2026-01-07T17:41:18.225850Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208213:2202], cookie# 4 2026-01-07T17:41:18.225866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676156594208177:2202][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:18.225881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208214:2202], cookie# 4 2026-01-07T17:41:18.225909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676156594208177:2202][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:18.225959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676156594208016:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:18.226010Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676156594208016:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676156594208177:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807678147 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:18.226065Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676156594208016:2118], cacheItem# { Subscriber: { Subscriber: [1:7592676156594208177:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807678147 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-07T17:41:18.226186Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676165184143152:2508], recipient# [1:7592676165184143151:2507], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:18.226214Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676152299240400:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676156594208215:2202], cookie# 4 2026-01-07T17:41:18.226248Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676165184143151:2507] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:18.226299Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676165184143151:2507] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:18.226330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676156594208215:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676152299240400:2049], cookie# 4 2026-01-07T17:41:18.226351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676156594208177:2202][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7592676156594208212:2202], cookie# 4 2026-01-07T17:41:18.226360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676156594208177:2202][/dc-1] Sync cookie mismatch: sender# [1:7592676156594208212:2202], cookie# 4, current cookie# 0 2026-01-07T17:41:18.226914Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676165184143151:2507] Handle TEvDescribeSchemeResult Forward to# [1:7592676165184143150:2506] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 128 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767807678147 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:37:35.559568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:35.559674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.559721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:35.559760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:35.559796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:35.559829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:35.559904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:35.559990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:37:35.560909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:35.561246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:35.660411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:37:35.660475Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:35.677701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:35.678129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:35.678329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:35.685097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:35.685462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:35.686186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:35.686448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:35.689838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:35.690048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:35.691168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:35.691228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:35.691342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:35.691395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:35.691495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:35.691685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:35.845946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.846894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:37:35.847890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
nStep5000039 State->FrontStep: 5000039 2026-01-07T17:41:21.694129Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:21.694201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:41:21.694448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:41:21.694619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:21.694666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:41:21.694718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-07T17:41:21.694802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:41:21.694860Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:41:21.694925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-07T17:41:21.696074Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:21.696196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:21.696247Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:21.696294Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:41:21.696337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:41:21.697991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:21.698089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:21.698120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:21.698158Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:41:21.698195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:41:21.698285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:41:21.705386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-07T17:41:21.705491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-07T17:41:21.705595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-07T17:41:21.705937Z node 2 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-07T17:41:21.706264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:21.706364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-07T17:41:21.707274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:21.717803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:21.725761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:41:21.739067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-07T17:41:21.739142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:41:21.739298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:41:21.741273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2026-01-07T17:41:21.741447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:41:21.741489Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:41:21.741612Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:21.741662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:21.741697Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:21.741730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:21.741782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:41:21.741856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:753:2729] message: TxId: 102 2026-01-07T17:41:21.741908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:21.741949Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:41:21.741988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:41:21.742135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:41:21.749883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:41:21.749961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:815:2783] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-07T17:41:21.753371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:21.753621Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:41:21.753888Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-07T17:41:21.762005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:21.762266Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:41:21.762631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:41:21.762677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:41:21.763093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:41:21.763197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:41:21.763241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:853:2821] TestWaitNotification: OK eventTxId 103 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2026-01-07T17:41:12.053231Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676139262739554:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:12.053605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:12.109918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:12.454089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:12.454224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:12.553151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:12.574976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:12.749099Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676134967772032:2081] 1767807671977483 != 1767807671977486 2026-01-07T17:41:12.759476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:12.825471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:13.077348Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:13.078179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:13.078186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:13.078193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:13.078252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:18230 2026-01-07T17:41:13.682160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:13.692512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:13.709945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:41:13.769511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:41:16.702196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676156442609474:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:16.702361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:16.703748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676156442609486:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:16.703801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676156442609487:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:16.709387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:16.714480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:16.745525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676156442609491:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:41:17.044476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676139262739554:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:17.044557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:17.405136Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676156442609556:2596] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:17.454022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:17.928806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:17.950671Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676160737576864:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:17.952493Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmMwYjg5YWYtZjM5ZTZjNTUtMjE5Zjk3ZTEtMTdlNDUxN2I=, ActorId: [1:7592676156442609472:2326], ActorState: ExecuteState, LegacyTraceId: 01kecrrw8r4yx6f1vd9zxsrnwe, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:17.955045Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:18.081533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2026-01-07T17:38:56.527957Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1767807536527917 2026-01-07T17:38:56.735543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675556397022785:2095];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:56.735738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:56.805064Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675554566921764:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:56.811420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:56.814580Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:38:56.828762Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:38:56.999722Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:57.016045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:38:57.054129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:57.054225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:57.055744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:57.055809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:57.064077Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:38:57.064227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:57.077032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:57.137140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:57.180123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:38:57.207963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001b72/r3tmp/yandexynsoIn.tmp 2026-01-07T17:38:57.207986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001b72/r3tmp/yandexynsoIn.tmp 2026-01-07T17:38:57.208168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001b72/r3tmp/yandexynsoIn.tmp 2026-01-07T17:38:57.208280Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:38:57.247595Z INFO: TTestServer started on Port 12946 GrpcPort 31274 2026-01-07T17:38:57.256718Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:31274 2026-01-07T17:38:57.412797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:38:57.759797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:57.826357Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:38:59.919651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675567451823967:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:59.919668Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675567451823975:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:59.919750Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:59.920016Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592675567451823982:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:59.920085Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:38:59.927878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:38:59.946516Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592675567451823981:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:39:00.247255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:00.261544Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675569281926023:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:00.262030Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTg0NjVmNmYtMjdkZWFjZTEtOGNkYmM1ZWItMTYwNWYyYw==, ActorId: [1:7592675569281925996:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrmpqd98026kwk3rdyyv8c, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:00.264867Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:39:00.261217Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592675571746791307:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:00.287935Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592675571746791322:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:39:00.288440Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZTAwZTM1ZjgtNGExNDA1MDItMzBmM2Y5NDEtYTZhMjZlODU=, ActorId: [2:7592675567451823965:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrmppd313a62ttqn7f6g6c, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:39:00.288905Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17: ... me { } } 2026-01-07T17:41:19.864660Z :DEBUG: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2026-01-07T17:41:19.864687Z :DEBUG: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session: acknoledged message 1 2026-01-07T17:41:19.869045Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0 grpc read done: success: 0 data: 2026-01-07T17:41:19.869080Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0 grpc read failed 2026-01-07T17:41:19.869105Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0 grpc closed 2026-01-07T17:41:19.869120Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0 is DEAD 2026-01-07T17:41:19.869595Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:41:19.870054Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7592676083872602910:3549] destroyed 2026-01-07T17:41:19.870098Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:41:19.870131Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:19.870147Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.870164Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:19.870182Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.870196Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:19.870242Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:19.870257Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.870269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:19.870285Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.870298Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:19.873916Z :DEBUG: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2026-01-07T17:41:19.874029Z :ERROR: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2026-01-07T17:41:19.874070Z :ERROR: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2026-01-07T17:41:19.874103Z :INFO: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session will now close 2026-01-07T17:41:19.874176Z :DEBUG: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session: aborting 2026-01-07T17:41:19.977116Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:19.977157Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.977172Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:19.977194Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:19.977209Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:19.993545Z :DEBUG: [/Root] TraceId [] SessionId [src_id|b01a82f8-74ecf8df-7d7c3ca2-b8c4f188_0] MessageGroupId [src_id] Write session: destroy 2026-01-07T17:41:20.078998Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.079042Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.079057Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.079077Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.079091Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.182396Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.182444Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.182458Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.182477Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.182491Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.281855Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.281886Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.281900Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.281916Z node 
3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.281926Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.400127Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.400159Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.400171Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.400191Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.400202Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.502873Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.502909Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.502924Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.502946Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.502959Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.605239Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.605278Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.605294Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.605318Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.605334Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.712411Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.712457Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.712473Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.712495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.712509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.814443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.814490Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.814507Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.814531Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.814546Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:41:20.917066Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:41:20.917114Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.917131Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:41:20.917157Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:41:20.917176Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin |88.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-07T17:41:15.058707Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676151334033921:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:15.058759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:15.655729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.688619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.688691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.710826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.812151Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.991166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:41:16.031051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676151334033962:2119] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:16.031115Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676155629001761:2448] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:16.031298Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676151334033968:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:16.031406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676151334034230:2287][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676151334033968:2121], cookie# 1 2026-01-07T17:41:16.033758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676151334034241:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034238:2287], cookie# 1 2026-01-07T17:41:16.033826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676151334034242:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034239:2287], cookie# 1 2026-01-07T17:41:16.033841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676151334034243:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034240:2287], cookie# 1 2026-01-07T17:41:16.033876Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676147039066357:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034241:2287], cookie# 1 2026-01-07T17:41:16.033902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676147039066360:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034242:2287], cookie# 1 2026-01-07T17:41:16.033956Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676147039066363:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676151334034243:2287], cookie# 1 2026-01-07T17:41:16.034002Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676151334034241:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676147039066357:2050], cookie# 1 2026-01-07T17:41:16.034019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676151334034242:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676147039066360:2053], cookie# 1 2026-01-07T17:41:16.034035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676151334034243:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676147039066363:2056], cookie# 1 2026-01-07T17:41:16.034091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676151334034230:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676151334034238:2287], cookie# 1 2026-01-07T17:41:16.034122Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676151334034230:2287][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:16.034141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676151334034230:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676151334034239:2287], cookie# 1 2026-01-07T17:41:16.034164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676151334034230:2287][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:16.034233Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676151334034230:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676151334034240:2287], cookie# 1 2026-01-07T17:41:16.034259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676151334034230:2287][/dc-1] Sync cookie mismatch: sender# [1:7592676151334034240:2287], cookie# 1, current cookie# 0 2026-01-07T17:41:16.034329Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676151334033968:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:16.039658Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676151334033968:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676151334034230:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:16.039782Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676151334033968:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676151334034230:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:16.042204Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676155629001762:2449], recipient# [1:7592676155629001761:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] 
Groups: [] } }] } 2026-01-07T17:41:16.042250Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676155629001761:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:16.067251Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676155629001761:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:16.070414Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676155629001761:2448] Handle TEvDescribeSchemeResult Forward to# [1:7592676155629001760:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:16.081669Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676151334033962:2119] Handle TEvProposeTransaction 2026-01-07T17:41:16.081704Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676151334033962:2119] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:41:16.081783Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676151334033962:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592676155629001767:2453] 2026-01-07T17:41:16.085095Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:16.105040Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676151334033968:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Oper ... 
SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592676177103839090:3042][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7592676151334033968:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:21.419347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676177103839101:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676147039066357:2050] 2026-01-07T17:41:21.419367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676177103839102:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676147039066360:2053] 2026-01-07T17:41:21.419403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676177103839103:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676147039066363:2056] 2026-01-07T17:41:21.419440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676177103839091:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676177103839098:3043] 2026-01-07T17:41:21.419463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676177103839091:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676177103839099:3043] 2026-01-07T17:41:21.419484Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592676177103839091:3043][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592676151334033968:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:21.419502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676177103839091:3043][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592676177103839100:3043] 2026-01-07T17:41:21.419521Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592676177103839091:3043][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7592676151334033968:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:21.419550Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676147039066357:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839095:3042] 2026-01-07T17:41:21.419565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676147039066357:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839101:3043] 2026-01-07T17:41:21.419578Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: 
[1:7592676147039066360:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839096:3042] 2026-01-07T17:41:21.419590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676147039066360:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839102:3043] 2026-01-07T17:41:21.419604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676147039066363:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839097:3042] 2026-01-07T17:41:21.419619Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676147039066363:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676177103839103:3043] 2026-01-07T17:41:21.419671Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676151334033968:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-07T17:41:21.419741Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676151334033968:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592676177103839090:3042] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:21.419858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676151334033968:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676177103839090:3042] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:21.419890Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676151334033968:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:41:21.419946Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676151334033968:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592676177103839091:3043] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:21.420009Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676151334033968:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676177103839091:3043] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:21.420118Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676177103839104:3044], recipient# [1:7592676177103839086:2333], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:22.069664Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676151334033968:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:22.069821Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676151334033968:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676155629001769:2454] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:22.069939Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676181398806420:3048], recipient# [1:7592676181398806419:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:22.209154Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676151334033968:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:22.209298Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676151334033968:2121], cacheItem# { Subscriber: { Subscriber: 
[1:7592676155629001769:2454] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:22.209396Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676181398806422:3049], recipient# [1:7592676181398806421:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 28892, MsgBus: 10384 2026-01-07T17:40:49.829268Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676042006296039:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:49.829338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:50.180878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:50.274855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:50.274929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:50.284307Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:50.295280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:50.365713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:50.412974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:50.412997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:50.413009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:40:50.413107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:50.855599Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:51.027289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:51.090511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:40:53.273724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676059186166129:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.273860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.274369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676059186166139:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.274424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.508513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.628289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.658930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.689394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.732743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676059186166442:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.732815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.733019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676059186166447:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.733075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676059186166448:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.733151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.736895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:53.746399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676059186166451:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-07T17:40:53.884878Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676059186166502:2766] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24419, MsgBus: 11088 2026-01-07T17:40:54.975358Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676061826776663:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:54.975398Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:55.000293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:55.100396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:55.100474Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:55.100773Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:55.102375Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676061826776633:2081] 1767807654974404 != 1767807654974407 2026-01-07T17:40:55.129975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:55.165866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:55.168434Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:55.168448Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:55.168456Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:55.168512Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:55.511286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:55.530864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:55.589026Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:55.726261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_tabl ... , at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 2370, MsgBus: 23043 2026-01-07T17:41:15.147445Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676151483311025:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:15.147498Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:15.241017Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:15.247931Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592676151483310919:2081] 1767807675144627 != 1767807675144630 2026-01-07T17:41:15.259505Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.287663Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.287771Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.297427Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.428482Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:15.428510Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:15.428519Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:15.428604Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:15.482797Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:16.070809Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:41:16.082923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:16.096420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:16.153253Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:16.192660Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:16.599974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:16.734818Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:19.806087Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676168663181990:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:19.806178Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:19.806470Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676168663182000:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:19.806525Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:19.885483Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:19.932372Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:19.974100Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.019941Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.075816Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.132810Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.155200Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676151483311025:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:20.155258Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:20.187274Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.272556Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.448039Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676172958150173:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:20.448174Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:20.448587Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676172958150178:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:20.448642Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676172958150179:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:20.448706Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:20.454617Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:20.474527Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676172958150182:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:41:20.548432Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676172958150233:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |88.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2026-01-07T17:41:20.835851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676173648786066:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:20.840965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:20.875783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:21.260690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:21.260795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:21.264942Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:16028) connection closed with error: Connection refused 2026-01-07T17:41:21.265765Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:41:21.266822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:21.269777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:21.270433Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676173648786015:2081] 1767807680809446 != 1767807680809449 2026-01-07T17:41:21.299758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:21.302272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:21.302284Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:21.302294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:21.302360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:21.529583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:21.808506Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> Donor::ContinueWithFaultyDonor |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.6%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> KqpErrors::ProposeResultLost_RwTx+UseSink >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> KqpErrors::ProposeError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:16.017450Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676154400677820:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:16.021389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:16.094717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:16.209173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:16.720618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:16.803120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:16.803229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:16.819988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:16.937092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676150105710311:2081] 1767807675963776 != 1767807675963779 2026-01-07T17:41:16.962158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:17.013186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:17.085206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:17.821161Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676154400677838:2095] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:17.821218Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676158695645694:2457] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:17.821301Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: 
Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676154400677948:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:17.821394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676154400678169:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676154400677948:2140], cookie# 1 2026-01-07T17:41:17.833651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154400678195:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678192:2299], cookie# 1 2026-01-07T17:41:17.833717Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150105710279:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678195:2299], cookie# 1 2026-01-07T17:41:17.833761Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154400678196:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678193:2299], cookie# 1 2026-01-07T17:41:17.833780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154400678197:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678194:2299], cookie# 1 2026-01-07T17:41:17.833825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154400678195:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150105710279:2049], cookie# 1 2026-01-07T17:41:17.833877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154400678169:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154400678192:2299], cookie# 1 2026-01-07T17:41:17.833907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676154400678169:2299][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:17.833928Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150105710285:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678197:2299], cookie# 1 2026-01-07T17:41:17.833955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154400678197:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150105710285:2055], cookie# 1 2026-01-07T17:41:17.833971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154400678169:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154400678194:2299], cookie# 1 2026-01-07T17:41:17.833992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676154400678169:2299][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:17.834048Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676154400677948:2140], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:17.837511Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150105710282:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154400678196:2299], cookie# 1 2026-01-07T17:41:17.837717Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154400678196:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150105710282:2052], cookie# 1 2026-01-07T17:41:17.837745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154400678169:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154400678193:2299], cookie# 1 2026-01-07T17:41:17.837790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676154400678169:2299][/dc-1] Sync cookie mismatch: sender# [1:7592676154400678193:2299], cookie# 1, current cookie# 0 2026-01-07T17:41:17.840856Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676154400677948:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676154400678169:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:17.840987Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676154400677948:2140], cacheItem# { Subscriber: { Subscriber: [1:7592676154400678169:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:17.843257Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676158695645695:2458], recipient# [1:7592676158695645694:2457], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:17.843313Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676158695645694:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:17.924362Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676158695645694:2457] SEND to# 72057594046644480 shardToRequest 
NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:17.938067Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676158695645694:2457] Handle TEvDescribeSchemeResult Forward to# [1:7592676158695645693:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:17.945500Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676154400677838:2095] Handle TEvProposeTransaction 2026-01-07T17:41:17.945525Z node 1 :TX_PROXY DEBUG: proxy ... 
2026-01-07T17:41:24.854522Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592676187399852176:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7592676187399852879:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807683824 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:24.854590Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592676187399852176:2109], cacheItem# { Subscriber: { Subscriber: [2:7592676187399852879:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807683824 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-07T17:41:24.854718Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592676191694820337:2677], recipient# [2:7592676191694820336:2676], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:24.854742Z node 2 :TX_PROXY INFO: describe.cpp:354: Actor# [2:7592676191694820336:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-07T17:41:24.859672Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7592676187399852168:2106] Handle TEvNavigate describe path /dc-1 2026-01-07T17:41:24.859712Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7592676191694820339:2679] HANDLE EvNavigateScheme /dc-1 2026-01-07T17:41:24.859792Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592676187399852176:2109], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:24.859880Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7592676187399852222:2132][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7592676187399852176:2109], cookie# 4 2026-01-07T17:41:24.859933Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676187399852237:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852234:2132], cookie# 
4 2026-01-07T17:41:24.859976Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676187399852238:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852235:2132], cookie# 4 2026-01-07T17:41:24.859993Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676187399852239:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852236:2132], cookie# 4 2026-01-07T17:41:24.860017Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676187399851888:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852237:2132], cookie# 4 2026-01-07T17:41:24.860044Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676187399851891:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852238:2132], cookie# 4 2026-01-07T17:41:24.860060Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676187399851894:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676187399852239:2132], cookie# 4 2026-01-07T17:41:24.860102Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676187399852237:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399851888:2049], cookie# 4 2026-01-07T17:41:24.860120Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676187399852238:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399851891:2052], cookie# 4 2026-01-07T17:41:24.860138Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676187399852239:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399851894:2055], cookie# 4 2026-01-07T17:41:24.860187Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592676187399852222:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399852234:2132], cookie# 4 2026-01-07T17:41:24.860209Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7592676187399852222:2132][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:24.860228Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592676187399852222:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399852235:2132], cookie# 4 2026-01-07T17:41:24.860250Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7592676187399852222:2132][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:24.860276Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592676187399852222:2132][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7592676187399852236:2132], cookie# 4 2026-01-07T17:41:24.860288Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7592676187399852222:2132][/dc-1] Sync cookie mismatch: sender# [2:7592676187399852236:2132], cookie# 4, current cookie# 0 2026-01-07T17:41:24.860320Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:2651: HandleNotify: self# [2:7592676187399852176:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:24.860382Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592676187399852176:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7592676187399852222:2132] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807683789 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:24.860446Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592676187399852176:2109], cacheItem# { Subscriber: { Subscriber: [2:7592676187399852222:2132] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807683789 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-07T17:41:24.860574Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592676191694820340:2680], recipient# [2:7592676191694820339:2679], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:24.860602Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7592676191694820339:2679] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:24.860669Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7592676191694820339:2679] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:24.861282Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7592676191694820339:2679] Handle TEvDescribeSchemeResult Forward to# [2:7592676191694820338:2678] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767807683789 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TraverseDatashard::TraverseOneTable [GOOD] >> KqpErrors::ResolveTableError >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |88.7%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher >> Donor::MultipleEvicts >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> Donor::ContinueWithFaultyDonor [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TDataShardTrace::TestTraceDistributedSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2026-01-07T17:41:25.967953Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676195477955673:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:25.968366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:26.425476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:26.425568Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:26.457649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:26.489063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676195477955568:2081] 1767807685945948 != 1767807685945951 2026-01-07T17:41:26.505747Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:26.516581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:26.593493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001768/r3tmp/yandexG9gbnh.tmp 2026-01-07T17:41:26.593526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001768/r3tmp/yandexG9gbnh.tmp 2026-01-07T17:41:26.593642Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2026-01-07T17:41:26.593666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/ec9d/001768/r3tmp/yandexG9gbnh.tmp 2026-01-07T17:41:26.593767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:26.675369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:14.599625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:14.711863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:14.719846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:14.720240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:14.720379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:15.293494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.409405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.409561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.444865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.542552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:16.642450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:16.643288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:16.643338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:16.643372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:16.644058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:16.713437Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:17.906624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:22.248216Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:22.263649Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:22.268522Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:22.345833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:22.345966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:22.393543Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:22.401525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:22.676974Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:22.677116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:22.678558Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.679258Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.679772Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.686176Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.686828Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.687027Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.687297Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.687483Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.687650Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:22.706068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:23.166632Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:23.293686Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:23.293811Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:23.374556Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:23.376060Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:23.376295Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:23.376357Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:23.376424Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:23.376477Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:23.376547Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:23.376608Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:23.386133Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:23.417239Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:23.417358Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:23.479853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:41:23.480789Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:41:23.619096Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:41:23.625343Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:23.676353Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:41:23.676451Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:41:23.676567Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:23.688543Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:23.705824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:23.737343Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:23.737545Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:23.776033Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:24.124607Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:24.247822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:24.635458Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:24.823830Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:24.823925Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:41:25.746502Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:26.054236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2744:3373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.054413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.054992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2762:3378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.055097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.074839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:26.677495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3044:3426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.677665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.774706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.774876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.776155Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3051:3432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:26.776378Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:26.776467Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3053:3434] 2026-01-07T17:41:26.776534Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3053:3434] 2026-01-07T17:41:26.777229Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3054:3179] 2026-01-07T17:41:26.777622Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3053:3434], server id = [2:3054:3179], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:26.777856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3054:3179], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:26.777938Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:41:26.778159Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:41:26.778231Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3051:3432], StatRequests.size() = 1 2026-01-07T17:41:26.798886Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:41:26.799230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3058:3438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.799358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.799992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3062:3442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.800089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.800174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3065:3445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.808203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:26.932107Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:41:26.932200Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:41:27.044322Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3053:3434], schemeshard count = 1 2026-01-07T17:41:27.310905Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3067:3447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:41:27.489962Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3171:3514] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:27.510241Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3194:3530]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:27.510484Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:27.510524Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3194:3530], StatRequests.size() = 1 2026-01-07T17:41:27.791358Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3242:3221]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:41:27.795761Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:27.795844Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:41:27.796423Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:27.796473Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:41:27.796519Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:41:27.836764Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:41:27.854188Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:41:27.854701Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-07T17:41:27.855021Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:41:27.855095Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TraverseDatashard::TraverseTwoTables [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-07T17:41:19.735708Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676170078415989:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:19.738767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:20.217106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:20.252078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:20.252158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:20.263668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:20.373765Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:20.382072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676170078415944:2081] 1767807679700918 != 1767807679700921 2026-01-07T17:41:20.431265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:20.781395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:20.789199Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676170078416161:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:20.794872Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592676170078416161:2107], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-07T17:41:20.801197Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:41:20.803487Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592676170078415912:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7592676174373383815:2267] 2026-01-07T17:41:20.803511Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592676170078415912:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-07T17:41:20.803577Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592676170078415912:2049] Subscribe: subscriber# [1:7592676174373383815:2267], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:20.803679Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592676170078415915:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# 
[1:7592676174373383816:2267] 2026-01-07T17:41:20.803686Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592676170078415915:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-07T17:41:20.803705Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592676170078415915:2052] Subscribe: subscriber# [1:7592676174373383816:2267], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:20.803739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592676170078415918:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7592676174373383817:2267] 2026-01-07T17:41:20.803748Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592676170078415918:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-07T17:41:20.803768Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592676170078415918:2055] Subscribe: subscriber# [1:7592676174373383817:2267], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:20.803822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676174373383815:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676170078415912:2049] 2026-01-07T17:41:20.803876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676174373383816:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676170078415915:2052] 2026-01-07T17:41:20.803915Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592676174373383817:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676170078415918:2055] 2026-01-07T17:41:20.803964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676174373383812:2267] 2026-01-07T17:41:20.804009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676174373383813:2267] 2026-01-07T17:41:20.804061Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7592676170078416161:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:20.804084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7592676174373383814:2267] 2026-01-07T17:41:20.804110Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592676174373383810:2267][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# 
[1:7592676170078416161:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:20.804130Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676170078416106:2104] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:20.804172Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676170078415912:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676174373383815:2267] 2026-01-07T17:41:20.804186Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676170078415915:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676174373383816:2267] 2026-01-07T17:41:20.804198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592676170078415918:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592676174373383817:2267] 2026-01-07T17:41:20.804245Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676170078416161:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-07T17:41:20.804343Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676170078416161:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592676174373383810:2267] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:20.815579Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676174373383818:2269] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:20.827668Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676170078416161:2107], cacheItem# { Subscriber: { Subscriber: [1:7592676174373383810:2267] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:20.827887Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676174373383819:2270], recipient# [1:7592676174373383809:2283], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:20.827967Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676170078416161:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: 
true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:20.828059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676174373383695:2210][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676170078416161:2107], cookie# 1 2026-01-07T17:41:20.828122Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676174373383741:2210][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676174373383738:2210], cookie# 1 2026-01-07T17:41:20.828139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676174373383742:2210][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676174373383739:2210], cookie# 1 2026-01-07T17:41:20.828152Z node 1 :SCHEME_BOARD_SUBSCRIB ... hemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2026-01-07T17:41:26.213075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2026-01-07T17:41:26.213094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2026-01-07T17:41:26.213122Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:41:26.213156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:41:26.216120Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7592676194471176651:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Strong: 1 } 2026-01-07T17:41:26.216259Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592676194471176651:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7592676198766144484:2502] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807686197 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7592676198766144484:2502] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767807686197 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2026-01-07T17:41:26.216574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:41:26.217487Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7592676194471176486:2090] Handle TEvNavigate describe path /dc-1 2026-01-07T17:41:26.217533Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# 
[2:7592676198766144512:2521] HANDLE EvNavigateScheme /dc-1 2026-01-07T17:41:26.217622Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592676194471176651:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:26.217704Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7592676194471176777:2175][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7592676194471176651:2115], cookie# 4 2026-01-07T17:41:26.217764Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676194471176793:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176790:2175], cookie# 4 2026-01-07T17:41:26.217781Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676194471176794:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176791:2175], cookie# 4 2026-01-07T17:41:26.217803Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7592676194471176795:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176792:2175], cookie# 4 2026-01-07T17:41:26.217830Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676194471176349:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176793:2175], cookie# 4 2026-01-07T17:41:26.217858Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676194471176352:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176794:2175], cookie# 4 2026-01-07T17:41:26.217876Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7592676194471176355:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7592676194471176795:2175], cookie# 4 2026-01-07T17:41:26.217922Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676194471176793:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176349:2049], cookie# 4 2026-01-07T17:41:26.217939Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676194471176794:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176352:2052], cookie# 4 2026-01-07T17:41:26.217973Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7592676194471176795:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176355:2055], cookie# 4 2026-01-07T17:41:26.218019Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592676194471176777:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176790:2175], cookie# 4 2026-01-07T17:41:26.218040Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7592676194471176777:2175][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:26.218058Z node 2 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:919: [main][2:7592676194471176777:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176791:2175], cookie# 4 2026-01-07T17:41:26.218082Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7592676194471176777:2175][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:26.218107Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7592676194471176777:2175][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7592676194471176792:2175], cookie# 4 2026-01-07T17:41:26.218119Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7592676194471176777:2175][/dc-1] Sync cookie mismatch: sender# [2:7592676194471176792:2175], cookie# 4, current cookie# 0 2026-01-07T17:41:26.218182Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7592676194471176651:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:26.218249Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7592676194471176651:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7592676194471176777:2175] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807686141 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:26.218333Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592676194471176651:2115], cacheItem# { Subscriber: { Subscriber: [2:7592676194471176777:2175] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767807686141 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-07T17:41:26.218486Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592676198766144513:2522], recipient# [2:7592676198766144512:2521], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:26.218516Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7592676198766144512:2521] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:26.218606Z node 2 
:TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7592676198766144512:2521] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:26.219254Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7592676198766144512:2521] Handle TEvDescribeSchemeResult Forward to# [2:7592676198766144511:2520] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767807686141 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 10093349586841929342 2026-01-07T17:41:29.921243Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:29.923292Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18255585548316393718] 2026-01-07T17:41:29.946483Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:243:2156] sender: 
[1:244:2060] recipient: [1:226:2145] 2026-01-07T17:40:18.988507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:40:18.988600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:18.988638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:40:18.988674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:40:18.988748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:40:18.988776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:40:18.988847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:18.988945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:40:18.989863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:40:18.990238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:40:19.075181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:40:19.075245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:19.098618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:40:19.099063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:40:19.099259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:40:19.108648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:40:19.108998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:40:19.109744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:40:19.111327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:40:19.114837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:19.114994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:40:19.115846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:40:19.115890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:19.116040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:40:19.116078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:40:19.116107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:40:19.116188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:40:19.253339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.257996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:19.258501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... tModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:41:30.328294Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:41:30.328352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:41:30.328754Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1096:2913], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.328816Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.328859Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:30.329035Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:989:2806], Recipient [7:245:2157]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2026-01-07T17:41:30.329074Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:41:30.329165Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:41:30.329283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:41:30.329331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1094:2911] 2026-01-07T17:41:30.329517Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1096:2913], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.329551Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.329592Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2026-01-07T17:41:30.330048Z node 
7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:965:2104], Recipient [7:245:2157] 2026-01-07T17:41:30.330106Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:41:30.333124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 965 RawX2: 34359740472 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:30.333490Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:41:30.333662Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:41:30.333917Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:41:30.336614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:30.337028Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-07T17:41:30.337113Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2026-01-07T17:41:30.337588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-07T17:41:30.337641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-07T17:41:30.338039Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1102:2919], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.338105Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.338147Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:30.338308Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:989:2806], Recipient [7:245:2157]: 
NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2026-01-07T17:41:30.338343Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:41:30.338431Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-07T17:41:30.338546Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:41:30.338587Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:1100:2917] 2026-01-07T17:41:30.338769Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1102:2919], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.338805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.338846Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-07T17:41:30.339308Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:965:2104], Recipient [7:245:2157] 2026-01-07T17:41:30.339366Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:41:30.342619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 965 RawX2: 34359740472 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:30.342971Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:41:30.343054Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2026-01-07T17:41:30.343294Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:41:30.345880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:30.346235Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-07T17:41:30.346319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-07T17:41:30.346859Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-07T17:41:30.346923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-07T17:41:30.347345Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1108:2925], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.347430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:30.347481Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:30.347676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:989:2806], Recipient [7:245:2157]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2026-01-07T17:41:30.347725Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:41:30.347825Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-07T17:41:30.347987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:41:30.348045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:1106:2923] 2026-01-07T17:41:30.348266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1108:2925], Recipient [7:245:2157]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.348320Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:41:30.348358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2026-01-07T17:40:14.716941Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675890306884678:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:14.717003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:15.007437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:15.007533Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:15.038243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:15.042822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:15.184156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675890306884574:2081] 1767807614713342 != 1767807614713345 2026-01-07T17:40:15.203934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:15.216873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:15.216893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:15.216900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:15.217003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:15.252621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:15.438252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:15.752132Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:17.923323Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:40:17.926606Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675903191787318:2319], Start check tables existence, number paths: 2 2026-01-07T17:40:17.926990Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=OTA3NmE5ZS00OTI2MWM4ZS04MDBjZDJhMC05MTgzYTUyZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# OTA3NmE5ZS00OTI2MWM4ZS04MDBjZDJhMC05MTgzYTUyZA== temp_dir_name# 2445d5a5-4428-e91d-a78f-5484b3a0779d trace_id# 2026-01-07T17:40:17.927217Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=OTA3NmE5ZS00OTI2MWM4ZS04MDBjZDJhMC05MTgzYTUyZA==, ActorId: [1:7592675903191787323:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:17.938132Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:40:17.938155Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675903191787318:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status 
PathErrorUnknown 2026-01-07T17:40:17.938210Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:40:17.938216Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675903191787318:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:40:17.938253Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675903191787318:2319], Successfully finished 2026-01-07T17:40:17.938321Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:40:17.938360Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:40:17.943596Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:17.947092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:17.948224Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-07T17:40:17.948402Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:40:17.958173Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:40:18.037659Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:18.075289Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675907486754697:2532] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:18.075422Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675903191787349:2499], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:40:18.078214Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw== temp_dir_name# 37ba34d6-4db5-6fa1-6fe8-00a36e632172 trace_id# 2026-01-07T17:40:18.078332Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw==, ActorId: [1:7592675907486754704:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:18.078542Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:40:18.078561Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:40:18.078805Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw==, ActorId: [1:7592675907486754704:2326], ActorState: ReadyState, LegacyTraceId: 01kecrq30ye1p44ps20eps96gn, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7592675907486754703:2537] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-07T17:40:18.078869Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7592675907486754704:2326], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw== 2026-01-07T17:40:18.078971Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907486754706:2327], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:40:18.079063Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7592675907486754707:2328], Database: /Root, Start database fetching 2026-01-07T17:40:18.079218Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:40:18.079275Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7592675907486754707:2328], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-07T17:40:18.079341Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-07T17:40:18.079436Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7592675907486754714:2329], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw==, Start pool fetching 2026-01-07T17:40:18.079474Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907486754716:2330], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:40:18.080178Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907486754706:2327], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:40:18.080180Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675907486754716:2330], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:40:18.080236Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [1:7592675907486754714:2329], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTg1YmM2YS0yNzU5Yzc0Mi00NWVjODMxZS1kMjYzYWRjNw==, Pool info successfully resolved 2026-01-07T17:40:18.080242Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-07T17:40:18.080269Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler ... 
ce_id# 2026-01-07T17:41:27.674195Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: ExecuteState, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:27.674474Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: ExecuteState, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:27.674515Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: ExecuteState, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, EndCleanup is_final# true trace_id# 2026-01-07T17:41:27.674581Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: ExecuteState, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, Sent query response back to proxy proxy_request_id# 61 proxy_id# [8:7592676130886917811:2265] trace_id# 2026-01-07T17:41:27.674613Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: unknown state, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:27.675338Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=YzVjZTM3YzAtZjgxZTkzMDEtZTUzMjRiYmMtNmY0NDM0ZTc=, ActorId: [8:7592676203901363899:2684], ActorState: unknown state, LegacyTraceId: 01kecrs6v9cs07tex1r67qnd1e, Session actor destroyed trace_id# 2026-01-07T17:41:27.706302Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=MjcxZTllZjEtZTFhMjM1ODgtMjZkNzUzOGMtYTBlYTM1ZTA=, ActorId: [8:7592676156656722220:2327], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:27.706376Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=MjcxZTllZjEtZTFhMjM1ODgtMjZkNzUzOGMtYTBlYTM1ZTA=, ActorId: [8:7592676156656722220:2327], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:27.706416Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=MjcxZTllZjEtZTFhMjM1ODgtMjZkNzUzOGMtYTBlYTM1ZTA=, ActorId: [8:7592676156656722220:2327], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:27.706457Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=MjcxZTllZjEtZTFhMjM1ODgtMjZkNzUzOGMtYTBlYTM1ZTA=, ActorId: [8:7592676156656722220:2327], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:27.706556Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} 
SessionId: ydb://session/3?node_id=8&id=MjcxZTllZjEtZTFhMjM1ODgtMjZkNzUzOGMtYTBlYTM1ZTA=, ActorId: [8:7592676156656722220:2327], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:27.713366Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E= temp_dir_name# 2234f4f4-41d1-2d75-85e6-73a1ba87bc5b trace_id# 2026-01-07T17:41:27.714090Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:27.714421Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Received request proxy_request_id# 65 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpc_actor# [8:7592676203901363929:2697] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-07T17:41:27.714458Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Request placed into pool from cache pool_id# default trace_id# 2026-01-07T17:41:27.714570Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Sending CompileQuery request trace_id# 2026-01-07T17:41:27.937429Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Acquire mvcc snapshot trace_id# 2026-01-07T17:41:27.958166Z node 8 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Read snapshot result status# UNAVAILABLE step# 1767807687000 tx_id# 18446744073709551615 trace_id# 2026-01-07T17:41:27.958260Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 136788324879512 trace_id# 2026-01-07T17:41:27.958323Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Sending to Executer 
span_id_size# 8 trace_id# 2026-01-07T17:41:27.958506Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Created new KQP executer executer_id# [8:7592676203901363943:2696] is_rollback# false trace_id# 2026-01-07T17:41:27.964556Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:41:27.964739Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, TxInfo status# Active kind# ReadOnly total_duration# 0 server_duration# 27.322 queries_count# 2 trace_id# 2026-01-07T17:41:27.964916Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:27.965358Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:27.965399Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, EndCleanup is_final# false trace_id# 2026-01-07T17:41:27.965476Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ExecuteState, LegacyTraceId: 01kecrs7121r01r1vjed3t26yj, Sent query response back to proxy proxy_request_id# 65 proxy_id# [8:7592676130886917811:2265] trace_id# 2026-01-07T17:41:27.973622Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:27.973734Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:41:27.973851Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, Created new KQP executer 
executer_id# [8:7592676203901363954:2696] is_rollback# true trace_id# 2026-01-07T17:41:27.973922Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:27.974747Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: CleanupState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:27.974795Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:27.974980Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=MWU4NWZmZTQtMWFkZjU3MjYtYTM2Y2YwNDUtNzMyNjFjN2E=, ActorId: [8:7592676203901363928:2696], ActorState: unknown state, Session actor destroyed trace_id# |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |88.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:14.588613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676149030956227:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:14.588683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:15.241086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.350907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.350993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.389340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.457244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.497109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.586437Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:15.728944Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676149030956256:2119] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:15.729003Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676153325924054:2447] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:15.729103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676149030956262:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:15.729392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676153325923810:2288][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676149030956262:2121], cookie# 1 2026-01-07T17:41:15.732352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676153325923862:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923859:2288], cookie# 1 2026-01-07T17:41:15.732396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676153325923863:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923860:2288], cookie# 1 2026-01-07T17:41:15.732461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676153325923864:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923861:2288], cookie# 1 2026-01-07T17:41:15.732506Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676149030955939:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923862:2288], cookie# 1 2026-01-07T17:41:15.732534Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676149030955942:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923863:2288], cookie# 1 2026-01-07T17:41:15.732563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676149030955945:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676153325923864:2288], cookie# 1 2026-01-07T17:41:15.732604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676153325923862:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676149030955939:2050], cookie# 1 2026-01-07T17:41:15.732677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676153325923863:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676149030955942:2053], cookie# 1 2026-01-07T17:41:15.732706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676153325923864:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676149030955945:2056], cookie# 1 2026-01-07T17:41:15.732743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676153325923810:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# 
[1:7592676153325923859:2288], cookie# 1 2026-01-07T17:41:15.732763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676153325923810:2288][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:15.732798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676153325923810:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676153325923860:2288], cookie# 1 2026-01-07T17:41:15.732824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676153325923810:2288][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:15.732860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676153325923810:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676153325923861:2288], cookie# 1 2026-01-07T17:41:15.732876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676153325923810:2288][/dc-1] Sync cookie mismatch: sender# [1:7592676153325923861:2288], cookie# 1, current cookie# 0 2026-01-07T17:41:15.732953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676149030956262:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:15.739188Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676149030956262:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676153325923810:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:15.739302Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676149030956262:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676153325923810:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:15.743740Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676153325924055:2448], recipient# [1:7592676153325924054:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:15.743806Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676153325924054:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:15.785464Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676153325924054:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:15.790164Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676153325924054:2447] Handle TEvDescribeSchemeResult Forward to# [1:7592676153325924053:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:15.796694Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676149030956256:2119] Handle TEvProposeTransaction 2026-01-07T17:41:15.796731Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676149030956256:2119] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:41:15.796801Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676149030956256:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592676153325924060:2452] 2026-01-07T17:41:15.971746Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592676153325924060:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-numbe ... 
h: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:29.186735Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:41:29.187130Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210635653764:2815], recipient# [3:7592676210635653744:2311], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.187610Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7592676189160815936:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7592676210635653768:2814] 2026-01-07T17:41:29.187622Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7592676189160815936:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:41:29.187662Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7592676189160815936:2050] Subscribe: subscriber# [3:7592676210635653768:2814], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:29.187691Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7592676189160815939:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7592676210635653769:2814] 2026-01-07T17:41:29.187699Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7592676189160815939:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:41:29.187722Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7592676189160815939:2053] Subscribe: subscriber# [3:7592676210635653769:2814], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:29.187840Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7592676210635653768:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676189160815936:2050] 2026-01-07T17:41:29.187876Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: 
[replica][3:7592676210635653769:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676189160815939:2053] 2026-01-07T17:41:29.187908Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676210635653765:2814] 2026-01-07T17:41:29.187941Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676210635653766:2814] 2026-01-07T17:41:29.187968Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7592676189160816271:2132], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:29.187989Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7592676189160815942:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7592676210635653770:2814] 2026-01-07T17:41:29.187997Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7592676189160815942:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:41:29.188018Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7592676189160815942:2056] Subscribe: subscriber# [3:7592676210635653770:2814], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:41:29.188043Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592676189160815936:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676210635653768:2814] 2026-01-07T17:41:29.188057Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592676189160815939:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676210635653769:2814] 2026-01-07T17:41:29.188088Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592676189160816271:2132], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:41:29.188139Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592676189160816271:2132], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7592676210635653763:2814] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:29.188198Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# 
[3:7592676189160816271:2132], cacheItem# { Subscriber: { Subscriber: [3:7592676210635653763:2814] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:29.188246Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210635653771:2816], recipient# [3:7592676210635653761:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.188365Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7592676210635653770:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676189160815942:2056] 2026-01-07T17:41:29.188414Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7592676210635653767:2814] 2026-01-07T17:41:29.188445Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592676210635653763:2814][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7592676189160816271:2132], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:29.188468Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592676189160815942:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676210635653770:2814] 2026-01-07T17:41:29.286414Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592676189160816046:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:29.286481Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:29.325591Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676189160816271:2132], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.325750Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676189160816271:2132], cacheItem# { Subscriber: { Subscriber: [3:7592676193455784313:2658] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:29.325847Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210635653784:2819], recipient# [3:7592676210635653783:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:15.901336Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676150475279776:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:15.904313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:16.200330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:16.247940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:16.248033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:16.267894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:16.394316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:16.403070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:16.746776Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676150475279803:2119] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:16.746830Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676154770247594:2446] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:16.746976Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676150475279809:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:16.747082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676154770247353:2285][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676150475279809:2121], cookie# 1 2026-01-07T17:41:16.748886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154770247407:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247404:2285], cookie# 1 2026-01-07T17:41:16.748943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154770247408:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247405:2285], cookie# 1 2026-01-07T17:41:16.749008Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150475279485:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247407:2285], cookie# 1 2026-01-07T17:41:16.749915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150475279488:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247408:2285], cookie# 1 2026-01-07T17:41:16.753223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676154770247409:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247406:2285], cookie# 1 2026-01-07T17:41:16.753322Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154770247407:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150475279485:2050], cookie# 1 2026-01-07T17:41:16.753828Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676150475279491:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676154770247409:2285], cookie# 1 2026-01-07T17:41:16.754317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154770247408:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150475279488:2053], cookie# 1 2026-01-07T17:41:16.754360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676154770247409:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676150475279491:2056], cookie# 1 2026-01-07T17:41:16.754402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154770247353:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154770247404:2285], cookie# 1 2026-01-07T17:41:16.754448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676154770247353:2285][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:16.754476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154770247353:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154770247405:2285], cookie# 1 2026-01-07T17:41:16.754568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7592676154770247353:2285][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:16.754603Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676154770247353:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676154770247406:2285], cookie# 1 2026-01-07T17:41:16.754617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676154770247353:2285][/dc-1] Sync cookie mismatch: sender# [1:7592676154770247406:2285], cookie# 1, current cookie# 0 2026-01-07T17:41:16.754686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676150475279809:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:16.769925Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676150475279809:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676154770247353:2285] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:16.770076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676150475279809:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676154770247353:2285] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:16.776146Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676154770247595:2447], recipient# [1:7592676154770247594:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:16.777252Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676154770247594:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:16.835397Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676154770247594:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:16.839706Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676154770247594:2446] Handle 
TEvDescribeSchemeResult Forward to# [1:7592676154770247593:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:16.841758Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676150475279803:2119] Handle TEvProposeTransaction 2026-01-07T17:41:16.841783Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676150475279803:2119] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:41:16.841841Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676150475279803:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592676154770247601:2452] 2026-01-07T17:41:16.884077Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:16.909053Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676150475279809:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Oper ... 
quests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7592676201635001050:2913] 2026-01-07T17:41:28.002617Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592676201635001047:2913][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7592676188750098020:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:41:28.006055Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:28.006210Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: [3:7592676188750098903:2755] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:28.006308Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676205929968369:2918], recipient# [3:7592676205929968368:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:28.992804Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592676184455130504:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:28.992898Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:29.006549Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.006715Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: [3:7592676201635001063:2915] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:29.006808Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210224935688:2924], recipient# [3:7592676210224935687:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.007360Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:29.007457Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: [3:7592676188750098903:2755] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:29.007538Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210224935690:2925], recipient# [3:7592676210224935689:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.002534Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.002712Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: [3:7592676188750098903:2755] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:30.002821Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676210224935712:2931], recipient# [3:7592676210224935711:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.009957Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.010120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: [3:7592676201635001063:2915] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:30.010209Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676214519903010:2932], recipient# [3:7592676214519903009:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.034462Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676188750098020:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:30.034648Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676188750098020:2121], cacheItem# { Subscriber: { Subscriber: 
[3:7592676188750098903:2755] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:30.034762Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676214519903012:2933], recipient# [3:7592676214519903011:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:15.179042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:15.307633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:15.319186Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:15.319629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:15.319809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:15.924109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:16.101630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:16.101776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:16.143644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:16.321457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:17.431572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:17.433009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:17.433059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:17.433111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:17.433321Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:17.510531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:18.578220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:23.262397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:23.270825Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:23.275034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:23.403756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:23.403921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:23.447408Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:23.449805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:23.747409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:23.747519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:23.749429Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.750163Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.750779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.751862Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.751984Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.752334Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.752468Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.752605Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.752833Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.780510Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:24.104456Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:24.161031Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:24.161162Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:24.247142Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:24.248596Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:24.248800Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:24.248870Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:24.249244Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:24.249334Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:24.249390Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:24.249464Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:24.250198Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:24.252623Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:41:24.262313Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:24.291798Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:41:24.291882Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:41:24.292015Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:24.300756Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:24.300866Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:24.331813Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:41:24.332295Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:41:24.335151Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:24.379471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:24.402953Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:24.403118Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:24.422043Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:24.815941Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:24.940256Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:25.445898Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:25.579416Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:25.579507Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:41:26.369303Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.889740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:27.644407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3035:3418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.644565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.644966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3039:3421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.645067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.646269Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3042:3424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:27.646480Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:27.646568Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3044:3426] 2026-01-07T17:41:27.646659Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3044:3426] 2026-01-07T17:41:27.647250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3045:3180] 2026-01-07T17:41:27.647580Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3044:3426], server id = [2:3045:3180], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:27.647766Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3045:3180], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:27.647853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:41:27.648055Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:41:27.648161Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3042:3424], StatRequests.size() = 1 2026-01-07T17:41:27.669073Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:41:27.669775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3049:3430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.670197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.670818Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3053:3434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.670904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.671129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3056:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:27.679462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:27.804943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:41:27.805025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:41:27.873375Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3044:3426], schemeshard count = 1 2026-01-07T17:41:28.291689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3058:3439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:41:28.524868Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3163:3508] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:28.559183Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3186:3524]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:28.559482Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:28.559544Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3186:3524], StatRequests.size() = 1 2026-01-07T17:41:28.840684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:29.490827Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3530:3589]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:29.491079Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:41:29.491129Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:3530:3589], StatRequests.size() = 1 2026-01-07T17:41:29.523489Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3539:3598]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:29.523702Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:41:29.523739Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:3539:3598], StatRequests.size() = 1 2026-01-07T17:41:29.723035Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3588:3438]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:41:29.730770Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:29.730851Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:41:29.731226Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:29.731283Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:41:29.731332Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:41:29.754967Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-07T17:41:29.764743Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:41:29.765123Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-07T17:41:29.765352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:41:29.765498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:41:29.765814Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3616:3454]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:41:29.768736Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:29.768795Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:41:29.769278Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:29.769338Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:41:29.769401Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:41:29.769639Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:41:29.777229Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-07T17:41:29.777826Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-07T17:41:29.778753Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-07T17:41:29.779268Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2026-01-07T17:41:28.161487Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676207715089699:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:28.161569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:28.753020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:28.770607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:28.770687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:41:28.913596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:29.045079Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676207715089463:2081] 1767807688068643 != 1767807688068646 2026-01-07T17:41:29.094008Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:29.113018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:29.129922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001644/r3tmp/yandexRKQOeS.tmp 2026-01-07T17:41:29.129948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001644/r3tmp/yandexRKQOeS.tmp 2026-01-07T17:41:29.130178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001644/r3tmp/yandexRKQOeS.tmp 2026-01-07T17:41:29.130276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:29.154434Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:50.368853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:50.368961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:50.369004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:50.369038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:50.369083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:50.369113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:50.369170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-07T17:37:50.369229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:50.370015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:50.370277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:50.488331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:50.488413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:50.489377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:50.504532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:50.505299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:50.505517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:50.515970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:37:50.516255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:50.517037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.517353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:50.521414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:50.521597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:50.522640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:50.522699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:50.522849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:50.522936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:50.522994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:50.523121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.529781Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:50.671490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:50.671753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.671983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:50.672043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:50.672371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:50.672470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:50.674596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.674767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:50.674953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.675026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:50.675066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:50.675102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:50.676802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.676855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:50.676896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:50.678447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2026-01-07T17:37:50.678493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:50.678549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.678602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:50.682273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:50.683865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:50.684053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:37:50.685098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:50.685228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:50.685283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.685540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:50.685591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:50.685760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:50.685861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:50.687635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
ablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 275382275 2026-01-07T17:41:22.214884Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:41:22.214991Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:41:22.215032Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:41:22.215075Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-07T17:41:22.215120Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 3 2026-01-07T17:41:22.221766Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:41:22.221945Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-07T17:41:22.222000Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-07T17:41:22.222051Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 4 2026-01-07T17:41:22.222104Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 4 2026-01-07T17:41:22.222228Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 2/3, is published: true 2026-01-07T17:41:22.225823Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:41:22.225999Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:41:22.228486Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:41:22.228677Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-07T17:41:22.254995Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 
72075186233409584 TxId: 190 2026-01-07T17:41:22.255069Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 190, tablet: 72075186233409584, partId: 2 2026-01-07T17:41:22.255195Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 190:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 FAKE_COORDINATOR: Erasing txId 190 2026-01-07T17:41:22.259517Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-07T17:41:22.259782Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-07T17:41:22.259850Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 190:2 ProgressState 2026-01-07T17:41:22.259958Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-07T17:41:22.259995Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-07T17:41:22.260039Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-07T17:41:22.260072Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-07T17:41:22.260114Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 3/3, is published: true 2026-01-07T17:41:22.260201Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [31:308:2297] message: TxId: 190 2026-01-07T17:41:22.260255Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-07T17:41:22.260301Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:0 2026-01-07T17:41:22.260334Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:0 2026-01-07T17:41:22.260430Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-07T17:41:22.260474Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:1 2026-01-07T17:41:22.260499Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:1 2026-01-07T17:41:22.260539Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-07T17:41:22.260570Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:2 2026-01-07T17:41:22.260595Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:2 2026-01-07T17:41:22.260684Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 140] was 3 2026-01-07T17:41:22.264738Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.264810Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [31:5660:7165] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-07T17:41:22.266711Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-07T17:41:22.266770Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-07T17:41:22.266860Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-07T17:41:22.266888Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-07T17:41:22.266951Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-07T17:41:22.266980Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-07T17:41:22.267040Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-07T17:41:22.267293Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-07T17:41:22.267397Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-07T17:41:22.267426Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-07T17:41:22.269840Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-07T17:41:22.270231Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-07T17:41:22.270348Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.270389Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [31:5703:7208] 2026-01-07T17:41:22.270698Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-07T17:41:22.270769Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.270799Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [31:5703:7208] 2026-01-07T17:41:22.271030Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-07T17:41:22.271176Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.271208Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [31:5703:7208] 2026-01-07T17:41:22.271304Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-07T17:41:22.271467Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.271501Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [31:5703:7208] 2026-01-07T17:41:22.271641Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-07T17:41:22.271672Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [31:5703:7208] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> TVPatchTests::PatchPartGetError >> Donor::MultipleEvicts [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::FindingPartsWithTimeout [GOOD] |88.7%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::PatchPartOk >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 14337210447941700231 0 donors: 2026-01-07T17:41:32.522320Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.524648Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:32.549572Z 4 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2026-01-07T17:41:32.649495Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.651802Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:32.669886Z 2 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2026-01-07T17:41:32.747002Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.749332Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:32.763184Z 4 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2026-01-07T17:41:32.842606Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.844907Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:32.858877Z 2 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2026-01-07T17:41:32.938521Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.940829Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:32.954695Z 4 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2026-01-07T17:41:33.034316Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 
2026-01-07T17:41:33.036542Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:33.049713Z 2 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2026-01-07T17:41:33.130212Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:33.132529Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:33.145696Z 4 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2026-01-07T17:41:33.231188Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:33.233540Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:33.246469Z 2 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2026-01-07T17:41:33.329164Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:33.331500Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 659593233948177406] 2026-01-07T17:41:33.344918Z 4 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartOk [GOOD] |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |88.7%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TVPatchTests::FindingPartsWhenError [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2026-01-07T17:41:33.696992Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:33.698221Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:33.698300Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:33.698524Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-07T17:41:33.698598Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:33.698823Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-07T17:41:33.698899Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2026-01-07T17:41:33.835575Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:33.837720Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:33.837801Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:33.838000Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# 
[1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-07T17:41:33.838068Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-07T17:41:33.838140Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-07T17:41:34.135464Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2026-01-07T17:41:34.145966Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:735} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2026-01-07T17:41:34.146044Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-07T17:41:34.146144Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenPartsAreDontExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2026-01-07T17:41:34.420778Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:34.422792Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:34.422881Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:34.423092Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-07T17:41:34.423184Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:34.423413Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 
2026-01-07T17:41:34.423478Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-07T17:41:34.423567Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-07T17:41:34.423747Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2026-01-07T17:41:34.423802Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-07T17:41:34.423865Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:16.618619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676157710808434:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:16.618669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:17.621625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:17.633151Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:17.657952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:18.086448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:18.091449Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:18.109579Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676157710808400:2082] 1767807676615727 != 1767807676615730 2026-01-07T17:41:18.114162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:18.114253Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:18.138499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:18.479234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:18.751471Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676157710808676:2118] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:18.751527Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676166300743783:2449] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:18.751634Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676157710808682:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:18.751710Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676162005776247:2296][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676157710808682:2120], cookie# 1 2026-01-07T17:41:18.753308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676162005776301:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776298:2296], cookie# 1 2026-01-07T17:41:18.753363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676162005776302:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776299:2296], cookie# 1 2026-01-07T17:41:18.753387Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676162005776303:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776300:2296], cookie# 1 2026-01-07T17:41:18.753429Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157710808368:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776301:2296], cookie# 1 2026-01-07T17:41:18.753456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157710808371:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776302:2296], cookie# 1 2026-01-07T17:41:18.753472Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157710808374:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676162005776303:2296], cookie# 1 2026-01-07T17:41:18.753508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676162005776301:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157710808368:2050], cookie# 1 2026-01-07T17:41:18.753525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676162005776302:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157710808371:2053], cookie# 1 2026-01-07T17:41:18.753600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: 
[replica][1:7592676162005776303:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157710808374:2056], cookie# 1 2026-01-07T17:41:18.753643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676162005776247:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676162005776298:2296], cookie# 1 2026-01-07T17:41:18.753666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676162005776247:2296][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:18.753681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676162005776247:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676162005776299:2296], cookie# 1 2026-01-07T17:41:18.753701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676162005776247:2296][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:18.753728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676162005776247:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676162005776300:2296], cookie# 1 2026-01-07T17:41:18.753749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676162005776247:2296][/dc-1] Sync cookie mismatch: sender# [1:7592676162005776300:2296], cookie# 1, current cookie# 0 2026-01-07T17:41:18.753791Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676157710808682:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:18.774888Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676157710808682:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676162005776247:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:18.775020Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676157710808682:2120], cacheItem# { Subscriber: { Subscriber: [1:7592676162005776247:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:18.780818Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676166300743785:2451], recipient# [1:7592676166300743783:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:18.780902Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676166300743783:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:18.895988Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676166300743783:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:18.899156Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676166300743783:2449] Handle TEvDescribeSchemeResult Forward to# [1:7592676166300743782:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:18.910188Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676157710808676:2118] Handle TEvProposeTransaction 2026-01-07T17:41:18.910211Z node ... 
imrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676220355594481:3019] 2026-01-07T17:41:31.066653Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592676198880756423:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676220355594476:3018] 2026-01-07T17:41:31.066667Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7592676198880756423:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7592676220355594482:3019] 2026-01-07T17:41:31.066702Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { Subscriber: { Subscriber: [3:7592676220355594464:3018] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:31.066763Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592676198880756739:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:41:31.066815Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592676198880756739:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7592676220355594465:3019] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:31.066872Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { Subscriber: { Subscriber: [3:7592676220355594465:3019] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:31.066969Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676220355594484:3020], recipient# [3:7592676220355594458:2315], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: 
true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:31.067063Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676220355594485:3021], recipient# [3:7592676220355594462:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:31.429044Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592676198880756705:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:31.429140Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:31.462586Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676198880756739:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:31.462753Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { Subscriber: { Subscriber: [3:7592676203175724818:2675] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:31.462861Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676220355594496:3022], recipient# [3:7592676220355594495:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.067778Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676198880756739:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.067942Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { Subscriber: { Subscriber: 
[3:7592676220355594465:3019] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:32.068039Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676224650561807:3030], recipient# [3:7592676224650561806:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.429660Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676198880756739:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.429793Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { Subscriber: { Subscriber: [3:7592676203175724818:2675] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:32.429877Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676224650561818:3031], recipient# [3:7592676224650561817:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.464470Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676198880756739:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:32.464601Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676198880756739:2120], cacheItem# { 
Subscriber: { Subscriber: [3:7592676203175724818:2675] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:32.464685Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676224650561820:3032], recipient# [3:7592676224650561819:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2026-01-07T17:41:34.554528Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:34.555644Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-07T17:41:34.555709Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-07T17:41:34.555943Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:34.556146Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2026-01-07T17:41:34.556222Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> 
TVPatchTests::FindingPartsWhenOnlyOnePartExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2026-01-07T17:41:34.506097Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:34.507945Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2026-01-07T17:41:34.508042Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-07T17:41:34.508135Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2026-01-07T17:41:35.259301Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:35.260539Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-07T17:41:35.260613Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-07T17:41:35.260870Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-07T17:41:35.261009Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:35.261204Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderEmpty >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] >> 
IncrementalRestoreScan::ChangeSenderSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2026-01-07T17:41:35.306377Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:35.307813Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:35.307874Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-07T17:41:35.307943Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-07T17:41:35.580906Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:35.581340Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:35.581424Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:35.581636Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-07T17:41:35.581733Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-07T17:41:35.581841Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TConfigsDispatcherTests::TestSubscriptionNotification >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb >> TestShred::ShredManualLaunch >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] Test command err: Trying to start YDB, gRPC: 25898, MsgBus: 19168 2026-01-07T17:40:49.631736Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676040169751448:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:49.631836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:49.878382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:49.925941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:49.926056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:50.009561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:50.011349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:50.014364Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676040169751420:2081] 1767807649628986 != 1767807649628989 2026-01-07T17:40:50.048448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:50.145671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:50.145695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:50.145705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:50.145815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:50.615424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:50.630275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:40:50.646581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:50.703001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:50.847612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:51.050781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:51.139403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.236910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057349622467:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.237031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.239704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057349622477:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.239811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.538239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.569092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.596596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.636670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.672367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.706480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.737360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.785623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.850591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057349623351:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.850663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.850796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057349623356:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.851051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057349623358:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.851095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.854512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:53.864523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676057349623359:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:40:53.939500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676057349623413:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:54.664967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676040169751448:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:54.665091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:55.351898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... N: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:26.512487Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:26.512591Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:26.529995Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:26.625782Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:26.792683Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:26.792707Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:26.792717Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:26.792829Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:27.229145Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:27.647341Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:27.665110Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:41:27.682746Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:27.799567Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:28.041522Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:28.168359Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:31.217083Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676198894169912:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:31.217177Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:32.461657Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676224663975494:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:32.461757Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:32.462405Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676224663975504:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:32.462469Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:32.566949Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.641688Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.679880Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.722659Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.766053Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.813721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.869506Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.933021Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:33.022061Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676228958943681:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:33.022146Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:33.022256Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676228958943686:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:33.022340Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676228958943688:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:33.022396Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:33.025865Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:33.035449Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676228958943690:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:41:33.137194Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676228958943744:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:35.227895Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T17:41:35.301145Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-07T17:41:35.314335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization [GOOD] >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system >> TestShred::ShredWithMerge >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2026-01-07T17:41:28.017204Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676207636594812:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:28.017318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:28.269117Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007719s 2026-01-07T17:41:28.349534Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.011397s 2026-01-07T17:41:29.025680Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:29.025782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:29.193130Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:29.287060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:30.053031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:30.330122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:30.657094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:30.657278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:30.858816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:30.901240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:31.217112Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.011013s 2026-01-07T17:41:31.221309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:31.447010Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.165103s 2026-01-07T17:41:31.452513Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.170594s 2026-01-07T17:41:32.073304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:32.073333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:32.073340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:32.073446Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:32.789031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:41:33.005574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676207636594812:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:33.005653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:33.021105Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7592676229111432454:2321], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49770" } 2026-01-07T17:41:33.021152Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-07T17:41:33.021188Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:33.021216Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:33.021377Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49770" 2026-01-07T17:41:33.027406Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767807693026955) 2026-01-07T17:41:33.068249Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767807693026955 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-07T17:41:33.068555Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-07T17:41:33.073646Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-07T17:41:33.074542Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767807693026955&action=1" } } } 2026-01-07T17:41:33.074767Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:33.074853Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:41:33.085056Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:41:33.085399Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7592676229111432454:2321], Recipient 
[1:7592676211931562637:2207]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767807693026955&action=1" } UserToken: "" PeerName: "ipv6:[::1]:49770" } 2026-01-07T17:41:33.085430Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2026-01-07T17:41:33.108123Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3434: Add subscription to /Root/users/user-1 for [1:7592676229111432454:2321] 2026-01-07T17:41:33.117529Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3442: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767807693026955&action=1" } } 2026-01-07T17:41:33.117734Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-07T17:41:33.126129Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:41:33.130676Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-07T17:41:33.130746Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:41:33.130824Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7592676229111432459:2207], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:41:33.130839Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-07T17:41:33.130849Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:33.130855Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:33.130905Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-07T17:41:33.130940Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-07T17:41:33.139015Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-07T17:41:33.142611Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:41:33.142655Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:33.142664Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:33.142678Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:33.142740Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: 
TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-07T17:41:33.142761Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767807693026955 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:41:33.145042Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:41:33.179101Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:33.179232Z nod ... ply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-01-07T17:41:34.437414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-07T17:41:34.437466Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found - using supplied 72075186224037891 2026-01-07T17:41:34.437501Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found - using supplied 72075186224037892 2026-01-07T17:41:34.437529Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037895 2026-01-07T17:41:34.437594Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037889 2026-01-07T17:41:34.437625Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-01-07T17:41:34.438820Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2026-01-07T17:41:34.438835Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-07T17:41:34.438876Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:41:34.439002Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7592676233406400558:2207], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:41:34.439020Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:41:34.439036Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:34.439044Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:34.439076Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-07T17:41:34.439097Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767807694348436 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:41:34.439157Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767807694348436 issue= 2026-01-07T17:41:34.441339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-07T17:41:34.441415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-07T17:41:34.441464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-07T17:41:34.441525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-07T17:41:34.441567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-07T17:41:34.447005Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T17:41:34.448720Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-07T17:41:34.448811Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-07T17:41:34.448823Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:34.449186Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592676211931562521:2213], Recipient [1:7592676211931562637:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:41:34.449202Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:41:34.449220Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:34.449229Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:34.449258Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-07T17:41:34.449278Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767807694348436 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:41:34.452804Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:41:34.452862Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:34.452891Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:41:34.453018Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:41:34.454018Z 
node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-07T17:41:34.454105Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-07T17:41:34.457479Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-07T17:41:34.457575Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7592676233406400648:2207], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-07T17:41:34.457604Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-07T17:41:34.457619Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:34.457629Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:34.457686Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-07T17:41:34.457715Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-07T17:41:34.501954Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:41:34.501991Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:41:34.502001Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:34.502016Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:41:34.502094Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767807694348436 2026-01-07T17:41:34.502107Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767807694348436 issue= 2026-01-07T17:41:34.502117Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767807694348436 issue= 2026-01-07T17:41:34.502135Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-07T17:41:34.502226Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767807694348436 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:41:34.505814Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-07T17:41:34.506028Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2422: Send /Root/users/user-1 notification to [1:7592676233406400553:2428]: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767807694348436&action=2" ready: true status: SUCCESS } } 2026-01-07T17:41:34.506111Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:41:34.516232Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592676233406400672:2431], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:49770" } 2026-01-07T17:41:34.516263Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:41:34.516393Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-07T17:41:34.518704Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7592676233406400675:2432], Recipient [1:7592676211931562637:2207]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:49770" } 2026-01-07T17:41:34.518732Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-07T17:41:34.518916Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-07T17:41:34.537989Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:41:34.538186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |88.8%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] >> TestShred::ShredWithCopyTable >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:41:33.510247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:33.663774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:33.684608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:33.685412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:33.685484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:34.029185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:34.029393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:34.159940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807690468210 != 1767807690468214 2026-01-07T17:41:34.172186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:34.222437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:34.359671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:34.686973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:34.701518Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:34.822225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:37.049829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.049966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1138:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.050034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.051011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.051111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.055232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:37.079969Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:41:37.234463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:41:37.322602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:34.928057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:35.037180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:35.059053Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:35.059591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:35.059659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:35.364125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:35.364256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:35.447708Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807691696262 != 1767807691696266 2026-01-07T17:41:35.459531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:35.505639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:35.607986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:35.911677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:35.925893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:36.035539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:34.036762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:34.280099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:34.307219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:34.307755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:34.307820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:34.606119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:34.606266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:34.686289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807691037112 != 1767807691037116 2026-01-07T17:41:34.703144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:34.751465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:34.865337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:35.170941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:35.185156Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:35.298378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:37.388488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.388612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1138:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.388709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.390144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.390302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.395042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:37.422267Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:41:37.572754Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:41:37.657727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:34.086765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:34.226650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:34.248755Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:34.249365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:34.249428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:34.592440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:34.592588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:34.669049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807691160624 != 1767807691160628 2026-01-07T17:41:34.682469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:34.731120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:34.833204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:35.144968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:35.158734Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:35.269107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:37.369084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.369196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1138:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.369273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.370144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.370278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.374959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:37.401952Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:41:37.553762Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:41:37.639885Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 10827, MsgBus: 27516 2026-01-07T17:40:49.563258Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676040418326415:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:49.563662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:49.755218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:49.775380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:49.775470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:49.829031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676040418326309:2081] 1767807649559826 != 1767807649559829 2026-01-07T17:40:49.839857Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:49.851838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:49.971271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:49.983762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:49.983785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:49.983794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:49.983890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:50.491559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:50.497886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-07T17:40:50.556922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:50.565321Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:50.684058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:50.860412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:50.968615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:52.756577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676053303230064:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.756744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.757184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676053303230074:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:52.757253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.046228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.068241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.091599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.114822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.138258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.166738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.197240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.282325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:40:53.355337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057598198236:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.355424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.355503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057598198241:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.355758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676057598198243:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.355789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:53.360085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:53.370376Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676057598198244:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:40:53.476040Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676057598198296:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:54.563191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676040418326415:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:54.563277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:40:54.934161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... _operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:19.937660Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.077749Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:23.347075Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676165200689239:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:23.347157Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:41:25.274302Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676195265462087:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:25.274395Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:25.274854Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676195265462097:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:25.274897Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:25.440496Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.495280Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.567001Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.645259Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.706928Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.778700Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.843743Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:25.982479Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:26.121428Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676199560430276:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.121568Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.121988Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676199560430281:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.122045Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592676199560430282:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.122085Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:26.127997Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:26.152523Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592676199560430285:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:41:26.249287Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592676199560430338:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:29.901429Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:30.842863Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:41:31.426546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:31.972218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:32.618886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:41:33.142262Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:41:33.685099Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:41:33.685136Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:33.963425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:34.036126Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-07T17:41:37.711623Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710724:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> IncrementalRestoreScan::Empty [GOOD] >> TestShred::SimpleTestForAllSupportedObjects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] Test command err: 2026-01-07T17:41:37.740073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:37.740146Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:37.977662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:39.173468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:39.173545Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:39.257381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] Test command err: 2026-01-07T17:41:37.982303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:37.982363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:38.200684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> TestShred::SimpleTestForTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:41:34.706122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:34.809491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:34.823054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:34.823385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:34.823426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:35.122956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:35.123087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:35.192391Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807691705182 != 1767807691705186 2026-01-07T17:41:35.203913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:35.251383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:35.352653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:35.657733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:35.671561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:35.777086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:37.921306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.921423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1138:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.921488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.922442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.922580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:37.927180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:37.953790Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:41:38.112428Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:41:38.196150Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> KqpErrors::ResolveTableError [GOOD] >> TestShred::SimpleTestForTopic >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TestShred::ManualLaunch3Cycles >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:39.284359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:39.399588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:39.422048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:39.422549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:39.422608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:39.694053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:39.694179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:39.770064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807696310202 != 1767807696310206 2026-01-07T17:41:39.781509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:39.831158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:39.923418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:40.218421Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Exhausted 2026-01-07T17:41:40.218540Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2026-01-07T17:41:40.218585Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Finish Done |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TestShred::ShredWithSplit >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse >> TConfigsDispatcherTests::TestRemoveSubscription >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] |88.8%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TestShred::Run3CyclesForTables >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TConfigsCacheTests::TestConfigurationSaveOnNotification |88.8%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> TVPatchTests::PatchPartPutError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:36.712197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:36.713328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:36.827696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:36.828737Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:37.357805Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:37.479276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:37.479384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:37.480073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:37.480186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:37.530804Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:37.531997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:37.532373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:37.612956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:37.625557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:38.289142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:38.361314Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:38.361442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:38.715394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:39.843978Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:41:39.844116Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-07T17:41:39.844403Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-07T17:41:39.844485Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:851} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-07T17:41:39.844742Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710658. Resolved key sets: 1 2026-01-07T17:41:39.845034Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-07T17:41:39.845467Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:41:39.845816Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1853:3211] 2026-01-07T17:41:39.845900Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1853:3211], channels: 0 2026-01-07T17:41:39.845989Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:41:39.846073Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-07T17:41:39.846126Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1853:3211] 2026-01-07T17:41:39.846177Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1853:3211], channels: 0 2026-01-07T17:41:39.846241Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1853:3211], trace_id# 2026-01-07T17:41:39.846321Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1853:3211], trace_id# 2026-01-07T17:41:39.846373Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [1:1850:3211] TxId: 281474976710658. 
Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:41:39.857839Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1853:3211] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:41:39.857999Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1853:3211], trace_id# 2026-01-07T17:41:39.858066Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1853:3211], trace_id# 2026-01-07T17:41:39.895542Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1853:3211] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1642 Tasks { TaskId: 1 CpuTimeUs: 990 FinishTimeMs: 1767807699895 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 87 BuildCpuTimeUs: 903 HostName: "ghrun-z2c6jslqle" NodeId: 1 CreateTimeMs: 1767807699846 UpdateTimeMs: 1767807699895 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:41:39.895607Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1853:3211] 2026-01-07T17:41:39.895674Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:1849:3211] trace_id# 2026-01-07T17:41:39.895736Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001642s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:41:39.936670Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:41:39.936737Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [1:1850:3211] TxId: 281474976710658. Ctx: { TraceId: 01kecrsjj667nr9sff3b518csy, Database: , SessionId: ydb://session/3?node_id=1&id=NjI3ODdmZGYtZGY0OWY3MDUtYzZmMWMxY2QtYmRiMDA0ODk=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-07T17:41:39.982390Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1868:3229], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2026-01-07T17:41:39.984831Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDRjMDRjZTctNDkwNWEwM2YtM2VkMTE5NmMtNzZiOTRjYjg=, ActorId: [1:1866:3227], ActorState: ExecuteState, LegacyTraceId: 01kecrsjz4504t04gkzfeejw7m, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[/Root/table-1]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:39.450184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:39.568807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:39.588136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:39.588673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:39.588747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:39.820894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:39.821055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:39.897110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807696593945 != 1767807696593949 2026-01-07T17:41:39.908381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:39.956604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:40.051846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:40.430798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:41:40.430969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.431135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:41:40.431195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:41:40.431356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:41:40.431521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:40.432140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.432258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-07T17:41:40.432433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.432484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:41:40.432522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:41:40.432551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:41:40.433190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.433231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:41:40.433261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:41:40.433597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.433632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.433681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.433717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.436347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:41:40.436733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:41:40.436839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:41:40.437306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.437350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-07T17:41:40.437387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.448027Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:40.544701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.544901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:41:40.545000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.545262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:41:40.545320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.545482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:41:40.545551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:41:40.546540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:41:40.546616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:40.546830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:41:40.546883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:560:2494], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-07T17:41:40.547165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.547211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-07T17:41:40.547294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:40.547326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.547361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:40.547395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.547442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:41:40.547492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.547527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:41:40.547555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:41:40.547612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-07T17:41:40.547652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-07T17:41:40.547686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594 ... ountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { 
MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-07T17:41:41.130122Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-07T17:41:41.130229Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1024:2865] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-07T17:41:41.130582Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1024:2865] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:41.130655Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1024:2865] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2026-01-07T17:41:41.131548Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1024:2865] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 
8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:41:41.132221Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:41.132406Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:41.132632Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:41:41.132732Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TVPatchTests::PatchPartFastXorDiffDisorder |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:14.281758Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676145984371085:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:14.301654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:14.793133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:14.958049Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:14.958145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:14.964234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.119016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.149221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:15.241427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:15.430369Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676145984371111:2119] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:15.430417Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676150279338918:2455] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:15.430504Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676145984371117:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:15.430604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676145984371378:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676145984371117:2121], cookie# 1 2026-01-07T17:41:15.432479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676145984371406:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371403:2292], cookie# 1 2026-01-07T17:41:15.432543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676145984371407:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371404:2292], cookie# 1 2026-01-07T17:41:15.432560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676145984371408:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371405:2292], cookie# 1 2026-01-07T17:41:15.432621Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676145984370801:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371408:2292], cookie# 1 2026-01-07T17:41:15.432689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676145984371408:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984370801:2056], cookie# 1 2026-01-07T17:41:15.432746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676145984371378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984371405:2292], cookie# 1 2026-01-07T17:41:15.432782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676145984371378:2292][/dc-1] Sync is 
in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:15.432806Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676145984370795:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371406:2292], cookie# 1 2026-01-07T17:41:15.432826Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676145984370798:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676145984371407:2292], cookie# 1 2026-01-07T17:41:15.432856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676145984371406:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984370795:2050], cookie# 1 2026-01-07T17:41:15.432874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676145984371407:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984370798:2053], cookie# 1 2026-01-07T17:41:15.432955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676145984371378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984371403:2292], cookie# 1 2026-01-07T17:41:15.432986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676145984371378:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:15.433014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676145984371378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676145984371404:2292], cookie# 1 2026-01-07T17:41:15.433025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592676145984371378:2292][/dc-1] Sync cookie mismatch: sender# [1:7592676145984371404:2292], cookie# 1, current cookie# 0 2026-01-07T17:41:15.433079Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676145984371117:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:15.449164Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676145984371117:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676145984371378:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:15.449310Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676145984371117:2121], cacheItem# { Subscriber: { Subscriber: [1:7592676145984371378:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:15.456103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676150279338919:2456], recipient# [1:7592676150279338918:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:15.456203Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676150279338918:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:15.500636Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676150279338918:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:15.503897Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676150279338918:2455] Handle TEvDescribeSchemeResult Forward to# [1:7592676150279338917:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:15.509479Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676145984371111:2119] Handle TEvProposeTransaction 2026-01-07T17:41:15.509525Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676145984371111:2119] TxId# 281474976710657 ProcessProposeTransaction 
2026-01-07T17:41:15.509602Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676145984371111:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592676150279338928:2461] 2026-01-07T17:41:15.661298Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592676150279338928:2461] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-numbe ... esNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:37.488661Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676246716414874:3616], recipient# [3:7592676246716414873:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.290317Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.290469Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676216651642519:2644] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:38.290564Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676251011382191:3620], recipient# [3:7592676251011382190:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.315104Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.315289Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676216651642519:2644] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:38.315403Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676251011382193:3621], recipient# [3:7592676251011382192:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.489594Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:38.489760Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676229536544912:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:38.489864Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676251011382195:3622], recipient# [3:7592676251011382194:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.294699Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], 
request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.294852Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676216651642519:2644] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:39.294999Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676255306349513:3626], recipient# [3:7592676255306349512:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.315859Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.316017Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676216651642519:2644] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:39.316177Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676255306349515:3627], recipient# [3:7592676255306349514:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.490603Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676212356674483:2129], request# { ErrorCount: 0 DatabaseName: 
/dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:39.490818Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676212356674483:2129], cacheItem# { Subscriber: { Subscriber: [3:7592676229536544912:3030] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:39.490918Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676255306349517:3628], recipient# [3:7592676255306349516:2343], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TVPatchTests::PatchPartPutError [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:39.471555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:39.590757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:39.609152Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:39.609638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:39.609690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:39.856296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:39.856428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:39.935493Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807696717573 != 1767807696717577 2026-01-07T17:41:39.954961Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:40.002189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:40.090984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:40.511029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:41:40.511258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.511498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:41:40.511580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:41:40.511816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:41:40.511917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:40.512713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.512875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-07T17:41:40.513140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.513214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:41:40.513263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:41:40.513301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:41:40.513920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.513981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:41:40.514023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:41:40.514506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.514559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.514615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.514669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.518444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:41:40.519088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:41:40.519234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:41:40.519886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.519951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-07T17:41:40.519999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.530861Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:40.631333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:41:40.631538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:41:40.631607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.631846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:41:40.631901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:41:40.632068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:41:40.632147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:41:40.633382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:41:40.633447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:40.633681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:41:40.633745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:560:2494], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-07T17:41:40.634050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:41:40.634094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-07T17:41:40.634208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:40.634245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.634284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:40.634327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.634375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:41:40.634421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:40.634458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:41:40.634489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:41:40.634553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-07T17:41:40.634601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-07T17:41:40.634638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594 ... ration id: 281474976715658:0 2026-01-07T17:41:41.180269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715658:0 2026-01-07T17:41:41.180405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:41:41.181051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-07T17:41:41.181160Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1012:2859] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-07T17:41:41.182842Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1012:2859] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:41.182970Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1012:2859] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2026-01-07T17:41:41.184016Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1012:2859] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 
2026-01-07T17:41:41.184867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:1022:2863], serverId# [1:1023:2864], sessionId# [0:0:0] 2026-01-07T17:41:41.185566Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:41.185775Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:41.186015Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T17:41:41.186141Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] BodySize: 18 }] } 2026-01-07T17:41:41.186244Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] 
SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-07T17:41:41.186373Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:68:2115] Handle TEvGetProxyServicesRequest 2026-01-07T17:41:41.186435Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T17:41:41.186724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1029:2869], serverId# [1:1030:2870], sessionId# [0:0:0] 2026-01-07T17:41:41.229386Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2026-01-07T17:41:41.229558Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:41:41.229727Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-07T17:41:41.229818Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T17:41:41.229970Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> Donor::CheckOnlineReadRequestToDonor >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TestShred::ShredManualLaunch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2026-01-07T17:41:42.658267Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:42.659455Z node 1 :BS_VDISK_PATCH INFO: 
{BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-07T17:41:42.659520Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:42.659684Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-07T17:41:42.659735Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:42.659884Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-01-07T17:41:42.659929Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-07T17:41:42.660001Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-07T17:41:42.660158Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2026-01-07T17:41:42.660201Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-07T17:41:42.660253Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2026-01-07T17:41:42.758403Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-07T17:41:42.759550Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-07T17:41:42.759613Z node 1 
:BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-07T17:41:42.759804Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-07T17:41:42.759906Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-07T17:41:42.760050Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] |88.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> Donor::SkipBadDonor |88.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] Test command err: 2026-01-07T17:37:18.826879Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675134295330742:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:18.827056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:18.871866Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:19.101118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:19.144022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:19.144131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:19.182128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:19.189076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:19.341297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:19.371635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003c95/r3tmp/yandexOzzgxh.tmp 2026-01-07T17:37:19.371660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003c95/r3tmp/yandexOzzgxh.tmp 2026-01-07T17:37:19.372677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003c95/r3tmp/yandexOzzgxh.tmp 2026-01-07T17:37:19.372792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:19.403659Z INFO: TTestServer started on Port 22009 GrpcPort 12461 PQClient connected to localhost:12461 2026-01-07T17:37:19.683302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:19.692767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 
72057594046644480 2026-01-07T17:37:19.835146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:19.837133Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:20.056086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T17:37:22.399545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675151475200775:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.399681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675151475200788:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.399738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.402400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675151475200791:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.402487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:22.404882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:22.441474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675151475200790:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:22.518176Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675151475200856:2646] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:22.914227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:22.915017Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675151475200864:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:22.915781Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODlmZGU4NDEtZDM4MTRlMWItMTMyOGY5NTUtOTkxOTA4YmM=, ActorId: [1:7592675151475200749:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrhqe6dm43z88jqvphy343, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:22.918005Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:22.951628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:23.040858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675155770168443:2823] 2026-01-07T17:37:23.826435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675134295330742:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:23.826536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-01-07T17:37:29.520804Z :Sinks_Oltp_WriteToTopic_1_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:29.556713Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:29.582137Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675181539972452:2937] connected; active server actors: 1 2026-01-07T17:37:29.582392Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T17:37:29.587703Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:29.587883Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:37:29.598409Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:29.599393Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:29.599893Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:29.604025Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:29.604147Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have ... 6-01-07T17:40:52.068756Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7592676032920839145:2548]: session cookie 2 consumer test-consumer session test-consumer_14_1_17399138814600733618_v1 grpc closed 2026-01-07T17:40:52.068778Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7592676032920839145:2548]: session cookie 2 consumer test-consumer session test-consumer_14_1_17399138814600733618_v1 proxy is DEAD 2026-01-07T17:40:52.071050Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-07T17:40:52.071869Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:52.071946Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T17:40:52.072002Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T17:40:52.072044Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2026-01-07T17:40:52.072111Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-07T17:40:52.072126Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0 grpc read done: success: 0 data: 
2026-01-07T17:40:52.072161Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0 grpc read failed 2026-01-07T17:40:52.072196Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0 grpc closed 2026-01-07T17:40:52.072213Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|4f700d38-10922eb0-7e6196ec-5375b0a9_0 is DEAD 2026-01-07T17:40:52.073234Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:52.073289Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:52.073451Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7592676032920839034:2524] destroyed 2026-01-07T17:40:52.073495Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:40:52.073524Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.073551Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.073568Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.073585Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.073597Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.073638Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7592676032920839037:2524] destroyed 2026-01-07T17:40:52.077177Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-01-07T17:40:52.077221Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0] PartitionId [0] Generation [1] Write session will now close 2026-01-07T17:40:52.077256Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-07T17:40:52.077656Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-07T17:40:52.077681Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-07T17:40:52.079239Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0 grpc read done: success: 0 data: 2026-01-07T17:40:52.079280Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0 grpc read failed 2026-01-07T17:40:52.079328Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0 grpc closed 2026-01-07T17:40:52.079348Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|d806b337-5197e271-8b0b6946-6be8142b_0 is DEAD 2026-01-07T17:40:52.080407Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:52.080446Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:40:52.080612Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592676032920838989:2516] destroyed 2026-01-07T17:40:52.080647Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:40:52.080673Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.080694Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.080708Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.080725Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.080738Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.080779Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592676032920838992:2516] destroyed 2026-01-07T17:40:52.138870Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.138918Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.138934Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.138957Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.138974Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.139035Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.139048Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.139059Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.139081Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.139092Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.139123Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.139136Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.139146Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.139160Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.139170Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.242050Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.242086Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 
0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242101Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.242122Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242137Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.242181Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.242191Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242202Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.242228Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242240Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:40:52.242268Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:40:52.242279Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242289Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:40:52.242302Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:40:52.242310Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:38.298719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:38.298871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.298927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:38.298969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:38.299011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:38.299059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:38.299129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.299215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:38.300260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:38.300615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:38.382651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:38.382727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:38.399237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:38.399642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:38.399863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:38.408479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:38.408839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:38.409702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:38.410011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:38.416323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.416552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:38.417861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:38.417924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.418017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:38.418063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:38.418097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:41:38.418240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:38.560677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553241, Sender [1:1084:3000], Recipient [1:865:2815]: NKikimrTxDataShard.TEvVacuumResult VacuumGeneration: 1 TabletId: 72075186233409550 Status: OK 2026-01-07T17:41:41.722420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5465: StateWork, processing event TEvDataShard::TEvVacuumResult 2026-01-07T17:41:41.722475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:546: TTxCompleteShredShard Execute at schemestard: 72075186233409546 2026-01-07T17:41:41.722566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__tenant_shred_manager.cpp:309: [TenantShredManager] [Finished] Shred is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 32], tabletId# 72075186233409550, shardIdx# 72075186233409546:5 in# 83 ms, next wakeup in# 14.917000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2026-01-07T17:41:41.722663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:326: [TenantShredManager] Shred in shards is completed. Send response to root schemeshard 2026-01-07T17:41:41.722695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:349: [TenantShredManager] Complete: Generation# 1 2026-01-07T17:41:41.724457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2026-01-07T17:41:41.724819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3050:4662], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:41.724877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:41.724907Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:41.725091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125514, Sender [1:865:2815], Recipient [1:698:2681]: NKikimrScheme.TEvTenantShredResponse PathId { OwnerId: 72057594046678944 LocalId: 38 } Generation: 1 Status: COMPLETED 2026-01-07T17:41:41.725128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvTenantShredResponse 2026-01-07T17:41:41.725181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2026-01-07T17:41:41.725236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 84 ms, next wakeup# 599.916000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-07T17:41:41.725303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2026-01-07T17:41:41.725576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3049:4661], Recipient [1:865:2815]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:3050:4662] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-07T17:41:41.725628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:41:41.725670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2026-01-07T17:41:41.727012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:41.727062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:41.727501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3054:4666], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3055:4667] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:41:41.727542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:41:41.727574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-07T17:41:41.727705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-07T17:41:41.727741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:41.727777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:41.727841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:41.727893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:41.727953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:41.728021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:42.615805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:42.615867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:42.615910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 
2026-01-07T17:41:42.616034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:42.616069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:42.616186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-07T17:41:42.616209Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:42.616244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:42.616298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:42.616333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:42.616371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:42.616403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:43.175040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:43.175118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:43.175204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:43.175227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:43.175250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:43.175352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:43.175376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:43.175475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:43.175497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:43.175518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:43.175610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:43.175635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:43.175669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-07T17:41:43.183331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:43.184102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3106:4718], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:43.184177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:43.184216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:43.184356Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:681:2670], Recipient [1:698:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:43.184386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:43.184423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> Donor::SlayAfterWiping >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter [GOOD] >> TestShred::Run3CyclesForTopics >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> Donor::CheckOnlineReadRequestToDonor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] Test command err: 2026-01-07T17:41:42.266059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:42.266126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:42.493669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:43.786893Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:43.786965Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:43.869582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> TSchemeShardSysViewTest::EmptyName >> TSchemeShardSysViewTest::AsyncCreateSameSysView |88.8%| [TA] $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 2047589837386203380 2026-01-07T17:41:44.865801Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:44.868169Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7973975774254957547] 2026-01-07T17:41:44.893181Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2026-01-07T17:41:44.893439Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews >> TSchemeShardSysViewTest::DropSysView |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-07T17:41:17.320086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676161893153430:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:17.320411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:17.980446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:18.258859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:41:18.474336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:18.484700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:18.521364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:18.640050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:18.795774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:18.849767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:41:19.078755Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592676161893153581:2118] Handle TEvNavigate describe path dc-1 2026-01-07T17:41:19.078811Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592676170483088685:2449] HANDLE EvNavigateScheme dc-1 2026-01-07T17:41:19.078913Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592676161893153587:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:19.078994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592676161893153825:2274][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592676161893153587:2120], cookie# 1 2026-01-07T17:41:19.080637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676161893153832:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153829:2274], cookie# 1 2026-01-07T17:41:19.080671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:375: [replica][1:7592676161893153833:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153830:2274], cookie# 1 2026-01-07T17:41:19.080685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592676161893153835:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153831:2274], cookie# 1 2026-01-07T17:41:19.080718Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157598185976:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153833:2274], cookie# 1 2026-01-07T17:41:19.080756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157598185979:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153835:2274], cookie# 1 2026-01-07T17:41:19.080797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676161893153833:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157598185976:2053], cookie# 1 2026-01-07T17:41:19.080812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676161893153835:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157598185979:2056], cookie# 1 2026-01-07T17:41:19.080862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676161893153825:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676161893153830:2274], cookie# 1 2026-01-07T17:41:19.080893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592676161893153825:2274][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:41:19.080946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676161893153825:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676161893153831:2274], cookie# 1 2026-01-07T17:41:19.080969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592676161893153825:2274][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:41:19.081035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592676161893153587:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:41:19.081037Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592676157598185973:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592676161893153832:2274], cookie# 1 2026-01-07T17:41:19.081143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592676161893153832:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676157598185973:2050], cookie# 1 2026-01-07T17:41:19.081182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592676161893153825:2274][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592676161893153829:2274], cookie# 1 2026-01-07T17:41:19.081232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: 
[main][1:7592676161893153825:2274][/dc-1] Sync cookie mismatch: sender# [1:7592676161893153829:2274], cookie# 1, current cookie# 0 2026-01-07T17:41:19.094653Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592676161893153587:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592676161893153825:2274] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:41:19.094785Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592676161893153587:2120], cacheItem# { Subscriber: { Subscriber: [1:7592676161893153825:2274] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:41:19.101570Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592676170483088686:2450], recipient# [1:7592676170483088685:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:41:19.101647Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592676170483088685:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:41:19.326717Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592676170483088685:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:41:19.342326Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592676170483088685:2449] Handle TEvDescribeSchemeResult Forward to# [1:7592676170483088684:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 
ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:41:19.346137Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592676161893153581:2118] Handle TEvProposeTransaction 2026-01-07T17:41:19.346158Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592676161893153581:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:41:19.346212Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592676161893153581:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592676170483088692:2455] 2026-01-07T17:41:19.571723Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592676170483088692:2455] txid# 2814749767 ... 480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:41.275539Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676264968430639:4092], recipient# [3:7592676264968430638:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:41.282031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:41.282185Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676234903657938:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:41.282283Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676264968430641:4093], recipient# [3:7592676264968430640:2343], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.116235Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.116383Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676247788560392:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:42.116504Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676269263397957:4097], recipient# [3:7592676269263397956:2344], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.276462Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.276609Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676234903657938:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:42.276687Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676269263397959:4098], recipient# [3:7592676269263397958:2345], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.283067Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:42.283212Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676234903657938:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:42.283299Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676269263397961:4099], recipient# [3:7592676269263397960:2346], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:43.121387Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:43.121544Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676247788560392:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 
Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:43.121637Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676273558365277:4103], recipient# [3:7592676273558365276:2347], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:43.289101Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592676230608689667:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:41:43.289261Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592676230608689667:2118], cacheItem# { Subscriber: { Subscriber: [3:7592676234903657938:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:41:43.289364Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592676273558365280:4104], recipient# [3:7592676273558365279:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TSchemeShardSysViewTest::CreateExistingSysView >> TestShred::ShredWithMerge [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView >> StreamCreator::Basic >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] >> StreamCreator::TopicAutoPartitioning >> TestShred::SimpleTestForTables [GOOD] >> TSchemeShardSysViewTest::CreateSysView >> TSchemeShardSysViewTest::EmptyName [GOOD] 
>> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query >> TestShred::SimpleTestForAllSupportedObjects [GOOD] >> Donor::SkipBadDonor [GOOD] >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> TestShred::SimpleTestForTopic [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:45.475787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:45.475892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:45.475938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:45.475977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:45.476015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:45.476040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:45.476110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:45.476210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:45.477684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:45.478016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:45.556120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:45.556190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:45.573914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:45.574291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:45.574489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:45.582847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-07T17:41:45.583181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:45.583906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:45.584174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:45.587878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:45.588073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:45.589233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:45.589295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:45.589409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:45.589453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:45.589551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:45.589992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:45.727485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.728515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.728644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.728738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.728825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.728916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:41:45.729389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.729643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
views_update.cpp:213: SysViewsRosterUpdate# [1:818:2793] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_ds_pdisks' 2026-01-07T17:41:46.437271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-07T17:41:46.437377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-07T17:41:46.437426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-07T17:41:46.437459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 43 2026-01-07T17:41:46.437497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 39 2026-01-07T17:41:46.437936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-07T17:41:46.438016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-07T17:41:46.438043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-07T17:41:46.438071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:41:46.438112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:41:46.438168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720658, subscribers: 1 2026-01-07T17:41:46.438219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:818:2793] 2026-01-07T17:41:46.440700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2026-01-07T17:41:46.442342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 
2026-01-07T17:41:46.442540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:818:2793] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_sys_view' 2026-01-07T17:41:46.442596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:818:2793] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:760:2746] sender: [1:860:2058] recipient: [1:15:2062] 2026-01-07T17:41:46.506389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:46.506612Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 259us result status StatusSuccess 2026-01-07T17:41:46.506897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:46.507398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:46.507587Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 192us result status 
StatusPathDoesNotExist 2026-01-07T17:41:46.507762Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000042, drop txId: 281474976720658" Path: "/MyRoot/.sys/new_sys_view" PathId: 38 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:46.508292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:46.508465Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_ds_pdisks" took 148us result status StatusPathDoesNotExist 2026-01-07T17:41:46.508581Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_ds_pdisks\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976720657" Path: "/MyRoot/.sys/new_ds_pdisks" PathId: 39 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:46.509102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:46.509262Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_partition_stats" took 169us result status StatusSuccess 2026-01-07T17:41:46.509531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_partition_stats" PathDescription { Self { Name: "new_partition_stats" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> Donor::SlayAfterWiping [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithMerge [GOOD] Test command err: 2026-01-07T17:41:38.822267Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-07T17:41:38.859187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:38.859299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.859343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:38.859391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:38.859437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:38.859497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:38.859560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.859673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:38.860610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:38.860946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:38.947980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:38.948034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:38.948584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:38.948868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:38.949096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:38.957154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:38.961094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:38.961666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:38.962010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:38.963151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.963481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:38.964340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:38.964386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.964575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:38.964615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:38.964647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:38.964767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:39.106186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.107992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.108645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
MeasureSelfResponseTime 2026-01-07T17:41:46.102554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.102626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.102718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.102746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.134959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.135041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.135143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.135176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.145624Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.145698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.145810Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.145841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.177796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.177861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.177938Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.177968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.188405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.188479Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.188575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.188608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.222055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.222135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.222247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.222280Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.232771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.232848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.232976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.233013Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.265131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.265213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.265354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.265386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.276318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1582:3400], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2225 Memory: 90453 Storage: 10149823 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-07T17:41:46.276382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:41:46.276433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.2225 2026-01-07T17:41:46.276526Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:41:46.276554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:41:46.287081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.287166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.287200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.287487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:46.287536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.287569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.287634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.287679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:46.287748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2026-01-07T17:41:46.287793Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-07T17:41:46.288423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:46.292142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1897:3656], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:46.292226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:46.292267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:46.292465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:46.292505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:46.292541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] >> TSchemeShardSysViewTest::DropSysView [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] >> TestShred::ShredWithCopyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:46.105618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:46.105712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.105750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:46.105794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:46.105836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:46.105864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:46.105932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.106036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:46.106879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:46.107147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:46.183548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:46.183617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:46.199760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:46.200160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:46.200383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:46.207267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:46.207627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:46.208428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:46.208714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:46.211494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.211684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:46.212713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:46.212764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.212854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:46.212902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:46.213014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:46.213166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:46.326769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.327855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.328877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.329019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
678944 2026-01-07T17:41:47.051017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:41:47.051051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:41:47.053140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.053214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:41:47.053259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:41:47.056163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.056214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.056268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:41:47.056323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:41:47.056480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:41:47.057987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:41:47.058148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:41:47.058559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:47.058678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:47.058719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:41:47.059003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:41:47.059056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:41:47.059195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:41:47.059282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:47.061100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:47.061200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:47.061375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:47.061484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:41:47.061847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.061896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:41:47.061996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:47.062027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:47.062062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:41:47.062105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:47.062142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:41:47.062175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:41:47.062269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:41:47.062300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:41:47.062356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:41:47.062392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:41:47.062424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:41:47.062892Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:41:47.062981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:41:47.063037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:41:47.063082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:41:47.063162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:41:47.063242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:41:47.065887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:41:47.066405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:41:47.066966Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2665] Bootstrap 2026-01-07T17:41:47.068029Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2665] Become StateWork (SchemeCache [1:681:2670]) 2026-01-07T17:41:47.070804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:47.070950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0 2026-01-07T17:41:47.071004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0, sysViewDescription: Name: "" Type: EPartitionStats 2026-01-07T17:41:47.071098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-07T17:41:47.072154Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:41:47.074776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:47.075000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/ 2026-01-07T17:41:47.075422Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:41:47.075607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:41:47.075646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:41:47.075996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:41:47.076105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:41:47.076143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:691:2680] TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:41.336757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:41.336848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.336890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:41.336920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:41.336977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:41.337000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:41.337041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.337112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:41.337791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:41.338005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:41.408464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:41.408509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:41.421455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:41.421706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:41.421847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:41.427348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:41.427752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:41.428647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:41.428945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:41.431817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.432001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:41.433232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:41.433290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.433440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:41.433492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:41.433540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:41.433705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:41.585637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.586625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.586757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.586836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.586922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.586983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.587715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:45.976951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:45.977439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2760:4436], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2761:4437] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:41:45.977493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:41:45.977529Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-07T17:41:45.977663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-07T17:41:45.977696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:45.977752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:45.977831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:45.977873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:45.977934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:45.978003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:46.378583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1316:3202]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378876Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.378983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1316:3202], Recipient [1:1316:3202]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.379027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.379104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:865:2815], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.379128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.379213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.379240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.420382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.420457Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.420521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.420814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-07T17:41:46.420849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.420878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.420957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.421007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:46.421061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:46.421110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:46.761134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1316:3202]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761253Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.761352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.761371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.761415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1316:3202], Recipient [1:1316:3202]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.761430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.761473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:865:2815], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.761491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.803419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.803517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.803547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.803812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:46.803843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.803872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.803947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.803986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:46.804061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.936000s, Timestamp# 1970-01-01T00:00:05.200000Z 2026-01-07T17:41:46.804098Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-07T17:41:46.810519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:46.811085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2780:4456], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:46.811146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:46.811181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:46.811338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:681:2670], Recipient [1:698:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:46.811372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:46.811406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 6286807722825985936 2026-01-07T17:41:46.110356Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:46.112630Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11592806183912909644] 2026-01-07T17:41:46.135458Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:40.810607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:40.810738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:40.810800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:40.810847Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:40.810888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:40.810922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:40.810995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:40.811088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:40.812093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:40.812484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:40.907736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:40.907813Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:40.945938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:40.946315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:40.946545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:40.969502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:40.969944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:40.970701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:40.971009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:40.979352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:40.979556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:40.980906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:40.980990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:40.981131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:40.981186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:40.981228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:40.981421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:41.114947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.115999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.116417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:46.062130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.062473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3112:4724], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3113:4725] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:41:46.062515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:41:46.062547Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-07T17:41:46.062710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-07T17:41:46.062742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.062783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.062838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.062871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:46.062923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:46.062999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:46.546286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1439:3299]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], 
Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.546692Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.546722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.546806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1439:3299], Recipient [1:1439:3299]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.546837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.546908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:865:2815], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.546933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.589058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.589159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.589205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.589413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-07T17:41:46.589445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.589484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.589571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.589621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:46.589675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:46.589754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:46.987301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2026-01-07T17:41:46.987369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.987460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1439:3299]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.987492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.987551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.987591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.987662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:698:2681], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.987690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.987763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1439:3299], Recipient [1:1439:3299]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.987790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.987838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:865:2815], Recipient [1:865:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.987861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.040947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.041035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.041067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:47.041363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:47.041404Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:47.041433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:47.041519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:47.041556Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:47.041622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.921000s, Timestamp# 1970-01-01T00:00:05.215000Z 2026-01-07T17:41:47.041666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-07T17:41:47.054252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:47.055000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3132:4744], Recipient [1:698:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.055061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.055103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:47.055281Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:681:2670], Recipient [1:698:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:47.055320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:47.055361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:45.595791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:45.595900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:45.595954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:45.596000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:45.596037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:45.596070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:45.596123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:41:45.596238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:45.597266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:45.597558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:45.686805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:45.686863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:45.710657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:45.711092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:45.711311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:45.718449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:45.718805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:45.719791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:45.720116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:45.724515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:45.724734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:45.725993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:45.726058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:45.726183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:45.726236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:45.726329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:45.726533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:45.878323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.879996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.880057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.880130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.880264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:45.880333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.401639Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:41:47.401711Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:47.401740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:47.401772Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:47.401799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:47.401830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:41:47.401863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:47.401892Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:41:47.401920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:41:47.401976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-07T17:41:47.402007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:41:47.402032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2026-01-07T17:41:47.402057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-07T17:41:47.402694Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:47.402789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:47.402818Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:47.402849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2026-01-07T17:41:47.402879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:41:47.403934Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:47.404009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:47.404036Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:47.404063Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-07T17:41:47.404091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-07T17:41:47.404151Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:41:47.406104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:47.407081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-07T17:41:47.407329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:41:47.407377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:41:47.407455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:41:47.407480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:41:47.407895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:41:47.408013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:41:47.408055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:328:2317] 2026-01-07T17:41:47.408203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:41:47.408298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:41:47.408323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:328:2317] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-07T17:41:47.408696Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.408888Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 213us result status StatusSuccess 2026-01-07T17:41:47.409334Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:47.409829Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.410014Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 208us result status StatusSuccess 2026-01-07T17:41:47.410323Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:41.463922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:41.464050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.464095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:41.464148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:41.464192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:41.464219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:41.464274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.464343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:41.465182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:41.465492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:41.554407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:41.554455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:41.570144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:41.570426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:41.570628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:41.577937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:41.578258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:41.578813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:41.579016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:41.582018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.582197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:41.583387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:41.583441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.583572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:41.583660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:41.583703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:41.583866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:41.736744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.737737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.737870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.737950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.738806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:46.083564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.083921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2051:3863], Recipient [1:699:2682]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2052:3864] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:41:46.083956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:41:46.083993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-07T17:41:46.084105Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:699:2682]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-07T17:41:46.084136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.084189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.084261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.084310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:46.084394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:46.084449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:46.703325Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:699:2682]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:866:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:46.703629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:699:2682], Recipient [1:699:2682]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.703655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.703708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:866:2816], Recipient [1:866:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.703739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.703789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1254:3136], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.703810Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:46.724734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:699:2682]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.724807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:46.724850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:46.725067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:699:2682]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-07T17:41:46.725099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:46.725127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:46.725201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:46.725393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:46.725445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:46.725505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:47.357985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:699:2682]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:866:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.358348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:699:2682], Recipient [1:699:2682]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.358373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.358421Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:866:2816], Recipient [1:866:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.358444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.358490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1254:3136], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.358508Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.379348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:699:2682]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.379427Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.379478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:47.379695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:701:2683], Recipient [1:699:2682]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:47.379732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:47.379782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:47.379854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:47.379892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:47.379967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.982000s, Timestamp# 1970-01-01T00:00:05.153000Z 2026-01-07T17:41:47.380022Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-07T17:41:47.382731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:47.383379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2073:3885], Recipient [1:699:2682]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.383462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.383514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:47.383696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:681:2670], Recipient [1:699:2682]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:47.383722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:47.383754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 14708289038776879001 2026-01-07T17:41:46.221657Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:46.223410Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1498422245488894443] 2026-01-07T17:41:46.242230Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:46.383222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:46.383329Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.383378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:46.383423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:46.383465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:46.383496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:46.383563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.383635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:46.384530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:46.384809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:46.461328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:46.461389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:46.477138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:46.477480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:46.477673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:46.484075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:46.484416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:46.485121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:46.485380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:46.487990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.488162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:46.489288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:46.489346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-07T17:41:46.489479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:46.489527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:46.489619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:46.489833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:46.626083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.626937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627382Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.627974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720657, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:41:47.396986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:47.397046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:770:2745], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 2 2026-01-07T17:41:47.397102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:770:2745], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 38 2026-01-07T17:41:47.397220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.397292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720657:0 ProgressState 2026-01-07T17:41:47.397406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-07T17:41:47.397479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:41:47.397525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-07T17:41:47.397583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:41:47.397630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2026-01-07T17:41:47.397684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:41:47.397732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976720657:0 2026-01-07T17:41:47.397765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976720657:0 2026-01-07T17:41:47.397836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:41:47.397876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976720657, publications: 2, subscribers: 1 
2026-01-07T17:41:47.397991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-07T17:41:47.398041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:41:47.400744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.400858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.400900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-07T17:41:47.400986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-07T17:41:47.401037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-07T17:41:47.402085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.402196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.402227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-07T17:41:47.402260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:41:47.402387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:41:47.402462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720657, subscribers: 1 2026-01-07T17:41:47.402504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:774:2749] 2026-01-07T17:41:47.404834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.406334Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-07T17:41:47.406533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:774:2749] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/ds_pdisks' 2026-01-07T17:41:47.406588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:774:2749] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:718:2704] sender: [1:802:2058] recipient: [1:15:2062] 2026-01-07T17:41:47.471680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.471906Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 237us result status StatusSuccess 2026-01-07T17:41:47.472209Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:47.473001Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.473189Z node 
1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 239us result status StatusSuccess 2026-01-07T17:41:47.473433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976720657 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:46.445024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:46.445131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.445171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:46.445199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:46.445228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-07T17:41:46.445248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:46.445291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.445347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:46.446124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:46.446389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:46.521399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:46.521458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:46.536199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:46.536558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:46.536690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:46.545274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:46.545692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:46.546404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:46.546658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:46.549416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.549629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:46.551022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:46.551089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.551248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:46.551305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:46.551415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:46.551641Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:46.705334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.706988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.707762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... n for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-07T17:41:47.576305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-07T17:41:47.576333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-07T17:41:47.576357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-07T17:41:47.576383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-07T17:41:47.576407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-07T17:41:47.576429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-07T17:41:47.576452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-07T17:41:47.576477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-07T17:41:47.576524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-07T17:41:47.576554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-07T17:41:47.576581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-07T17:41:47.576607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-07T17:41:47.576632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-07T17:41:47.576656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-07T17:41:47.576678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-07T17:41:47.576699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-07T17:41:47.576740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-07T17:41:47.576782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-07T17:41:47.576814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId 
[OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-07T17:41:47.576858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:41:47.576882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:41:47.576905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:41:47.576950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:41:47.576980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:41:47.577005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:41:47.577032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:41:47.577055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:41:47.577088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:41:47.577119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:41:47.577307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.577433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.577540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.577688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.577781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.577923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.578267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.578423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.578729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.578803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at 
schemeshard: 72057594046678944 2026-01-07T17:41:47.579004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.579170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.579220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.579288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.579481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.579585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.580783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.585825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:47.590859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:47.590932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:47.591182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:47.591255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:47.591303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:47.591451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:735:2721] sender: [1:795:2058] recipient: [1:15:2062] 2026-01-07T17:41:47.659387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.659669Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 319us result status StatusPathDoesNotExist 2026-01-07T17:41:47.659805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:46.799550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:46.799643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.799683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:46.799716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:46.799750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:46.799775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:46.799828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.799889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:41:46.800669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:46.800974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:46.890006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:46.890055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:46.906234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:46.906501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:46.906631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:46.920712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:46.921103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:46.921852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:46.922152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:46.927332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.927535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:46.928833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:46.928899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.929055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:46.929111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:46.929224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:46.929416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.098541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.099997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.100640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
93616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:41:47.794421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:41:47.794519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:41:47.794560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:41:47.794600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-07T17:41:47.794653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-07T17:41:47.795326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:41:47.795418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:41:47.795462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:41:47.795495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:41:47.795538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:41:47.795608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:41:47.798820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:41:47.799742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:41:47.799982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:41:47.800038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:41:47.800462Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:41:47.800557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:41:47.800598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:706:2695] TestWaitNotification: OK eventTxId 101 2026-01-07T17:41:47.801051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.801317Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 278us result status StatusSuccess 2026-01-07T17:41:47.801691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:41:47.805072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:41:47.805198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0 2026-01-07T17:41:47.805249Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0, sysViewDescription: Name: "new_sys_view" Type: ENodes 2026-01-07T17:41:47.805419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:41:47.807817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:41:47.808099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:41:47.808422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:41:47.808469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:41:47.808828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:41:47.808911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:41:47.808973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:714:2703] TestWaitNotification: OK eventTxId 102 2026-01-07T17:41:47.809455Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:47.809725Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 294us result status StatusSuccess 2026-01-07T17:41:47.810069Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] Test command err: 2026-01-07T17:41:39.208156Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-07T17:41:39.248425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:39.248528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:39.248565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:39.248614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:39.248651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:39.248678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:39.248726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:39.248794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:39.249638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:39.249909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:39.337027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:39.337088Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:39.337707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:39.338001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:39.338173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:39.345623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:39.349843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:39.350551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:39.351016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:39.352732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:39.352946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:39.354496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:39.354586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:39.354920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:39.354991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:39.355061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:39.355293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:39.478653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.479670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.479809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.479888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480357Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:39.480839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
e_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2026-01-07T17:41:47.555524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2026-01-07T17:41:47.555562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2026-01-07T17:41:47.555593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:41:47.555656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-01-07T17:41:47.566164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:41:47.566221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:41:47.566246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2026-01-07T17:41:47.588170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.588261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.588372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.588400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.598858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.598942Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.599034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.599066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.631044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.631130Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.631253Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.631286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.641796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.641902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.641992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.642025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.674135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.674216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.674381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.674418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.684846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.684920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.685061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.685107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.718693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.718760Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.718844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.718864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.729376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.729482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.729605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.729643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.761759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.761851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:47.761973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.762007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:47.772476Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.772540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:47.772566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:47.772740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:47.772766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:47.772789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:47.772853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:47.772880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:47.772958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2026-01-07T17:41:47.773010Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 35 s 2026-01-07T17:41:47.773674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:47.776610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2109:3822], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.776661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:47.776693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:47.776827Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:47.776852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:47.776884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2026-01-07T17:40:13.903706Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675884649617470:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:13.903842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:14.340215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:14.365007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:14.365151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:14.421111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:14.537926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:14.539323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675884649617257:2081] 1767807613868630 != 1767807613868633 2026-01-07T17:40:14.592103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:14.604468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:14.604504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:14.604513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:14.604647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:14.806365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:14.902250Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:17.373062Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:40:17.377210Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc= temp_dir_name# 0d15ed1d-44df-79e7-9e1b-4bace7968885 trace_id# 2026-01-07T17:40:17.378099Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675901829487300:2317], Start check tables existence, number paths: 2 2026-01-07T17:40:17.384795Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:40:17.384842Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:40:17.395564Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675901829487300:2317], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:40:17.395629Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675901829487300:2317], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:40:17.395657Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675901829487300:2317], Successfully finished 2026-01-07T17:40:17.395716Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:40:17.395770Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc=, ActorId: [1:7592675901829487303:2320], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:40:17.397427Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:40:17.401251Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: 
scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:17.405517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:40:17.407457Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:40:17.407709Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:40:17.421380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:40:17.504179Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:40:17.546938Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675901829487384:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:40:17.547159Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675901829487333:2501], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:40:17.555856Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:40:17.555886Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:40:17.555939Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:40:17.556107Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675901829487393:2327], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:40:17.556129Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc=, ActorId: [1:7592675901829487303:2320], ActorState: ReadyState, LegacyTraceId: 01kecrq2gj2ak3agjrh7x14pmd, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-07T17:40:17.574075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675901829487393:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:17.574233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:17.574320Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:40:17.574356Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675901829487403:2328], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:40:17.576571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675901829487403:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:17.576646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:40:17.910253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:17.928257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:40:17.933376Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc=, ActorId: [1:7592675901829487303:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrq2gj2ak3agjrh7x14pmd, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [1:7592675901829487401:2320] workload_service_cleanup# false trace_id# 2026-01-07T17:40:17.936330Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=NzVmZTI4Y2ItNzk1NWMxNWEtMzVkMjM0OGEtMTdjYTc4MTc=, ActorId: [1:7592675901829487303:2320], ActorState: CleanupState, LegacyTraceId: 01kecrq2gj2ak3agjrh7x14pmd, EndCleanup is_final# false tr ... ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:45.195319Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=OWI5MjQzNDgtZmE3NDYxNzktM2UwMWRjMzctM2I4M2Q4NWU=, ActorId: [8:7592676281248260832:2920], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:45.195357Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=OWI5MjQzNDgtZmE3NDYxNzktM2UwMWRjMzctM2I4M2Q4NWU=, ActorId: [8:7592676281248260832:2920], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:45.195394Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=OWI5MjQzNDgtZmE3NDYxNzktM2UwMWRjMzctM2I4M2Q4NWU=, ActorId: [8:7592676281248260832:2920], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:45.195475Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=OWI5MjQzNDgtZmE3NDYxNzktM2UwMWRjMzctM2I4M2Q4NWU=, ActorId: [8:7592676281248260832:2920], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:45.196098Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: ExecuteState, LegacyTraceId: 01kecrsr2j0xxbxnkrtk9j9wqv, Sent query response back to proxy proxy_request_id# 89 proxy_id# [8:7592676199643878907:2266] trace_id# 2026-01-07T17:41:45.196734Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 
severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2026-01-07T17:41:45.197074Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-07T17:41:45.197142Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:45.197189Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:45.197232Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:45.197269Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:45.197344Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZWRkODAwZDYtNTdiZjNlOWUtNTM5MzY2YTctNmYwZGY0ZTI=, ActorId: [8:7592676281248260830:2918], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:45.372971Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE= temp_dir_name# f076ee01-4b62-48c5-e85c-aa971dcd17b9 trace_id# 2026-01-07T17:41:45.373500Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:45.374148Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ReadyState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, Received request proxy_request_id# 92 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpc_actor# [8:7592676281248260875:2930] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-07T17:41:45.374190Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ReadyState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, Request placed into pool from cache pool_id# default trace_id# 2026-01-07T17:41:45.374312Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ExecuteState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, Sending 
CompileQuery request trace_id# 2026-01-07T17:41:45.396007Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7592676225413683547:2671][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 46 2026-01-07T17:41:45.396097Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7592676225413683547:2671][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 47 2026-01-07T17:41:45.397356Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7592676281248260877:2931], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-07T17:41:45.401873Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ExecuteState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:45.401928Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ExecuteState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:45.401957Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ExecuteState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, EndCleanup is_final# false trace_id# 2026-01-07T17:41:45.402090Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ExecuteState, LegacyTraceId: 01kecrsr8y61hfcp0vfe242bf4, Sent query response back to proxy proxy_request_id# 92 proxy_id# [8:7592676199643878907:2266] trace_id# 2026-01-07T17:41:45.402792Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2026-01-07T17:41:45.403154Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
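The issue chain repeated in this trace (Table metadata loading, code 1050 -> Failed to load metadata for //Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers -> LookupError, code 2005) is a transient compile-time failure: the session compiles the SELECT before the workload-manager metadata table is resolvable, so the compile actor reports status UNAVAILABLE. Such failures are normally treated as retryable on the client side. A minimal sketch of that handling, assuming plain Python with no YDB SDK and purely illustrative retry parameters; the query text is the one from the trace above, everything else is hypothetical:

    import time

    METADATA_QUERY = (
        "/*UI-QUERY-EXCLUDE*/ SELECT * FROM "
        "`//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers`;"
    )

    def run_with_retry(execute, query=METADATA_QUERY, attempts=5, base_delay=0.2):
        # Retry a query whose failure looks transient (UNAVAILABLE / LookupError),
        # backing off exponentially between attempts: 0.2s, 0.4s, 0.8s, ...
        for attempt in range(attempts):
            try:
                return execute(query)
            except Exception as err:  # a real client would catch only the SDK's retryable error types
                transient = "UNAVAILABLE" in str(err) or "LookupError" in str(err)
                if not transient or attempt == attempts - 1:
                    raise
                time.sleep(base_delay * (2 ** attempt))

In this run the session instead replies ReplyQueryCompileError with status UNAVAILABLE and is closed, as the surrounding KQP_SESSION entries show; the sketch only illustrates why the error is usually treated as retryable rather than fatal.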
: Error: LookupError, code: 2005 2026-01-07T17:41:45.403223Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:45.403280Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:45.403317Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:45.403356Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:45.403443Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=N2FkMzYzYTMtNTU1YzdlODctNzg2OWY3NzQtNmI5MmRhZjE=, ActorId: [8:7592676281248260874:2929], ActorState: unknown state, Session actor destroyed trace_id# |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:46.280195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:46.280292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.280325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:46.280360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:46.280392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:46.280416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:46.280456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:46.280539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:46.281294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:46.281534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:46.364644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:46.364703Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:46.379297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:46.379628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:46.379795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:46.386477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:46.386798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:46.387481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
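The ut_sysview trace that follows registers the built-in system views under /MyRoot/.sys, one Send TEvModifySchemeTransaction entry per view (auth_permissions, pg_tables, ds_pdisks, nodes, and so on). A small sketch for summarising such dumps, assuming plain Python; the regular expression keys off the CreateSysView { Name: "..." Type: ... } fragments visible in the trace and is a hypothetical helper, not part of the test tooling:

    import re

    CREATE_SYS_VIEW_RE = re.compile(r'CreateSysView \{ Name: "([^"]+)" Type: (\w+) \}')

    def registered_sys_views(log_text):
        # Map each system view name to its declared type, keeping first-seen order
        # (dicts preserve insertion order in Python 3.7+).
        views = {}
        for name, view_type in CREATE_SYS_VIEW_RE.findall(log_text):
            views.setdefault(name, view_type)
        return views

    # One CreateSysView fragment taken verbatim from the trace below:
    sample = 'CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662'
    assert registered_sys_views(sample) == {"ds_pdisks": "EPDisks"}

Run over the full trace it yields pairs such as auth_permissions -> EAuthPermissions, nodes -> ENodes and ds_vslots -> EVSlots, matching the roster the schemeshard sends one transaction at a time.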
2026-01-07T17:41:46.387737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:46.390273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.390421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:46.391249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:46.391326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:46.391559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:46.391599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:46.391696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:46.391882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:46.540513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.541969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:46.542392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... nStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:48.421661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:41:48.421725Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-07T17:41:48.421888Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:41:48.422082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-07T17:41:48.422171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:41:48.424853Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:48.424909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:41:48.425295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:41:48.425481Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2026-01-07T17:41:48.425534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-07T17:41:48.425607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:41:48.425917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.425976Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:41:48.426245Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:48.426294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:48.426349Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:41:48.426402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:48.426447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:41:48.426493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:41:48.426535Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:41:48.426591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:41:48.426700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:41:48.426745Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:41:48.426782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-07T17:41:48.426820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:41:48.428346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:48.428470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:48.428518Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 
2026-01-07T17:41:48.428561Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-07T17:41:48.428628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-07T17:41:48.430163Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:48.430266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:41:48.430298Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:41:48.430329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:41:48.430366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:41:48.430460Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:41:48.430814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:41:48.430871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:41:48.430988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-07T17:41:48.433552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:48.435539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:41:48.435673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2026-01-07T17:41:48.435973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:41:48.436028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:41:48.436122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:41:48.436149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:41:48.436689Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:41:48.436815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.436862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:740:2729] 2026-01-07T17:41:48.437190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:41:48.437256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.437292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:740:2729] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-07T17:41:48.437893Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:48.438210Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 396us result status StatusPathDoesNotExist 2026-01-07T17:41:48.438418Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TestShred::ShredWithSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:47.465214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:47.465308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:47.465353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:47.465387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:47.465423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:47.465449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:47.465498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:47.465559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:47.466481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:47.466734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:47.545164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:47.545208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:47.559735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:47.560049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:47.560261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:47.567140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:47.567369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:47.567934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:47.568181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:47.570635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:47.570786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:47.571672Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:47.571713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:47.571793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:47.571828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:47.571908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:47.572032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:47.718971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.720984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.721047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.721135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:47.721235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -01-07T17:41:48.490532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-07T17:41:48.490561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-07T17:41:48.490584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-07T17:41:48.490605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-07T17:41:48.490629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-07T17:41:48.490655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-07T17:41:48.490694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-07T17:41:48.490717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-07T17:41:48.490738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-07T17:41:48.490760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-07T17:41:48.490780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-07T17:41:48.490830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-07T17:41:48.490854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-07T17:41:48.490876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:41:48.490898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:41:48.490920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:41:48.490963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:41:48.491005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:41:48.491047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:41:48.491072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:41:48.491094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:41:48.491125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:41:48.491157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:41:48.491183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-07T17:41:48.491353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.491451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.491561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.491673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.491761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.491901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.492218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.492351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.492746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.492836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 
72057594046678944 2026-01-07T17:41:48.493097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.493260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.493305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.493387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.493603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.493722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.494822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:41:48.500839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:48.505037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:48.505130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:48.506123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:48.506192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:48.506241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:48.508351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:715:2701] sender: [1:777:2058] recipient: [1:15:2062] 2026-01-07T17:41:48.581408Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:41:48.581714Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 348us result status StatusSuccess 2026-01-07T17:41:48.582097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TPQCDTest::TestUnavailableWithoutBoth >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TPQCDTest::TestPrioritizeLocalDatacenter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2026-01-07T17:39:26.438474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:26.438531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:26.438928Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:26.448571Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:26.448884Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:26.449186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:26.457737Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:26.486307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:26.486765Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:26.487824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:26.487867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:26.487916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:26.488172Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:26.488320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:26.488363Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:26.544224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:26.574079Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:26.574268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:26.574370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:26.574418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:26.574460Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:26.574496Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.574651Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.574715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.575019Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:26.575111Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:26.575240Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.575280Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:26.575325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:26.575359Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:26.575420Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:26.575452Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:26.575493Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:26.575581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.575619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.575679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:26.584861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:26.584958Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:26.585056Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:26.585209Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:26.585273Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:26.585329Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:26.585374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:26.585407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:26.585441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:26.585473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.585726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:26.585764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:26.585803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:26.585837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.585900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:26.585925Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan 
for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:26.585953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:26.585985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.586016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:26.598030Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:26.598116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.598146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.598215Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:26.598271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:26.598755Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.598804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.598871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:26.599005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:26.599060Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:26.599211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.599251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:26.599286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:26.599329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:26.602605Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:26.602674Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.602882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.602924Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2026-01-07T17:39:26.602971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.603001Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:26.603043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:26.603091Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:26.603141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... ion plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2026-01-07T17:41:40.947228Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437184 has finished 2026-01-07T17:41:40.947262Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:41:40.947290Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:41:40.947321Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:41:40.947351Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:41:40.969919Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:40.969993Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:402] at 9437184 on unit CompleteOperation 2026-01-07T17:41:40.970068Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 402] from 9437184 at tablet 9437184 send result to client [4:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-01-07T17:41:40.970146Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-01-07T17:41:40.970193Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:41:40.970489Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:40.970523Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:41:40.970556Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2026-01-07T17:41:40.970616Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:40.970652Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:41:40.970685Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:40.970710Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2026-01-07T17:41:40.970754Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:41:40.970806Z node 4 :TX_DATASHARD 
DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-01-07T17:41:40.970841Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:41:40.971489Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:240:2232], Recipient [4:459:2401]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-01-07T17:41:40.971548Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:41:40.971587Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2026-01-07T17:41:40.971901Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [4:240:2232], Recipient [4:349:2317]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-01-07T17:41:40.971946Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:41:40.971987Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437185 source 9437184 dest 9437185 producer 9437184 txId 403 2026-01-07T17:41:40.972068Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-01-07T17:41:40.972132Z node 4 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2026-01-07T17:41:40.972212Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437185 2026-01-07T17:41:40.972312Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:240:2232], Recipient [4:459:2401]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-01-07T17:41:40.972346Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:41:40.972377Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2026-01-07T17:41:40.972875Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [4:349:2317], Recipient [4:349:2317]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:41:40.972917Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:41:40.973000Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-07T17:41:40.973042Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:41:40.973081Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2026-01-07T17:41:40.973114Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying 
to execute [1000004:403] at 9437185 on unit LoadAndWaitInRS 2026-01-07T17:41:40.973156Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-07T17:41:40.973193Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2026-01-07T17:41:40.973224Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit BlockFailPoint 2026-01-07T17:41:40.973253Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit BlockFailPoint 2026-01-07T17:41:40.973278Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-07T17:41:40.973301Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit BlockFailPoint 2026-01-07T17:41:40.973322Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2026-01-07T17:41:40.973348Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2026-01-07T17:41:40.975256Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2026-01-07T17:41:40.975321Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 5, NSelectRange: 1, NUpdateRow: 8, NEraseRow: 0, SelectRowRows: 4, SelectRowBytes: 32, SelectRangeRows: 32, SelectRangeBytes: 256, UpdateRowBytes: 61, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:41:40.975388Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2026-01-07T17:41:40.975419Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2026-01-07T17:41:40.975451Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2026-01-07T17:41:40.975480Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2026-01-07T17:41:40.975759Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is DelayComplete 2026-01-07T17:41:40.975792Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2026-01-07T17:41:40.975822Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2026-01-07T17:41:40.975854Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2026-01-07T17:41:40.975890Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-07T17:41:40.975914Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2026-01-07T17:41:40.975942Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437185 has finished 
2026-01-07T17:41:40.975976Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:41:40.976007Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-07T17:41:40.976039Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-07T17:41:40.976069Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:41:41.000171Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:41:41.000332Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2026-01-07T17:41:41.000409Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-01-07T17:41:41.000490Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-01-07T17:41:41.000541Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:41:41.001297Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:349:2317], Recipient [4:240:2232]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-01-07T17:41:41.001356Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:41:41.001400Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithSplit [GOOD] Test command err: 2026-01-07T17:41:41.703861Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-07T17:41:41.744856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:41.744986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.745030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:41.745069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:41.745113Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:41.745140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:41.745187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.745254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:41.746088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:41.746364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:41.830852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:41.830912Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:41.831499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:41.831788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:41.831947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:41.839490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:41.843586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:41.844295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:41.844729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:41.846254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.846399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:41.847511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:41.847725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.847967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:41.848011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:41.848050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:41:41.848230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:41.997218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.998975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.999097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.999190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.999272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.999329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
TEvMeasureSelfResponseTime 2026-01-07T17:41:49.016378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.051033Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.051109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.051214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.051246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.061678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.061736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.061805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.061828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.094003Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.094103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.094207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.094237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.104677Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.104749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.104850Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.104881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.137389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 
271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.137464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:49.137556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.137584Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:49.148756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1376:3244], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1036 Memory: 89229 Storage: 5024539 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-07T17:41:49.148826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:41:49.148873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.1036 2026-01-07T17:41:49.148995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:41:49.149040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:41:49.149305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1379:3246], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1024 Memory: 89253 Storage: 5127032 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-07T17:41:49.149344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:41:49.149379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.1024 2026-01-07T17:41:49.149468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:41:49.160014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:49.160092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:49.160155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-07T17:41:49.160417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-07T17:41:49.160454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:49.160487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:49.160554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:49.160597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:49.160652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2026-01-07T17:41:49.160694Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-07T17:41:49.161322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:49.164435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1850:3616], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:49.164502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:49.164539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:49.164640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:49.164672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:49.164706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TestShred::ManualLaunch3Cycles [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC >> TPQCDTest::TestUnavailableWithoutClustersList >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> TPQCDTest::TestDiscoverClusters |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TPQCDTest::TestRelatedServicesAreRunning >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [GOOD] >> StreamCreator::Basic [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TestShred::Run3CyclesForTables [GOOD] >> TestShred::Run3CyclesForAllSupportedObjects >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2026-01-07T17:41:47.038747Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676290688691927:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:47.039791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:47.081463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:47.311049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:47.311181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:47.313367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:41:47.353938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:47.390614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:47.538795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:47.566930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:47.567086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:47.567107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:47.567322Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:47.769917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:47.777938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:47.865014Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:41:47.865052Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:41:47.865538Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:41:48.047179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:49.978867Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767807707862, tx_id: 281474976710658 } } } 2026-01-07T17:41:49.979245Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle 
NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:41:49.980956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:49.981811Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:41:49.981832Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:41:50.009819Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:41:50.009856Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:41:50.010418Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-07T17:41:50.085299Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7592676303573594831:2337] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:41:50.088430Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:50.088481Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-07T17:41:50.099503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:41:50.115512Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:50.115549Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2026-01-07T17:40:57.811033Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676075572309777:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:57.811124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-07T17:40:58.043531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:58.050074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:58.050200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:58.089760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:58.154667Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676075572309749:2081] 1767807657809666 != 1767807657809669 2026-01-07T17:40:58.166096Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:58.208174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:58.208192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:58.208201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:58.208320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:58.217033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:58.382877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:58.820047Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:00.608715Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:41:00.608770Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:116: [WorkloadService] [Service] Resource pools was disabled 2026-01-07T17:41:00.612863Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA= temp_dir_name# 688d8999-4dd9-15a4-dd5b-dcbeebede347 trace_id# 2026-01-07T17:41:00.613016Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:00.632508Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor 
ydb_session_id# MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM= temp_dir_name# ff236362-4317-ad50-d973-57aa18ba4fa9 trace_id# 2026-01-07T17:41:00.632649Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:00.633146Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ReadyState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7592676088457212493:2492] database# Root database_id# /Root pool_id# trace_id# 2026-01-07T17:41:00.633299Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Sending CompileQuery request trace_id# 2026-01-07T17:41:00.939125Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136827742074840 trace_id# 2026-01-07T17:41:00.939203Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:41:00.939510Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Created new KQP executer executer_id# [1:7592676088457212500:2320] is_rollback# false trace_id# 2026-01-07T17:41:00.983517Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Forwarded TEvStreamData to [1:7592676088457212493:2492] trace_id# 2026-01-07T17:41:00.987934Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:41:00.988171Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, TxInfo status# Committed kind# Pure total_duration# 49.174 server_duration# 49.074 queries_count# 2 trace_id# 2026-01-07T17:41:00.988229Z node 1 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:00.988469Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:00.988511Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, EndCleanup is_final# true trace_id# 2026-01-07T17:41:00.988559Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: ExecuteState, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Sent query response back to proxy proxy_request_id# 3 proxy_id# [1:7592676075572310010:2265] trace_id# 2026-01-07T17:41:00.988611Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: unknown state, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:00.988917Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=MWFjYzg1YTAtNDdhMjZkMmYtZWZjZTgwOWQtZTI1ZTdhMTM=, ActorId: [1:7592676088457212494:2320], ActorState: unknown state, LegacyTraceId: 01kecrrcjs4dj596vcnsq0v94y, Session actor destroyed trace_id# 2026-01-07T17:41:00.999871Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:00.999933Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:00.999988Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:01.000021Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:01.000094Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: 
ydb://session/3?node_id=1&id=ZjZmODA3ZmItZTgwODhlNjEtZmFmMTFhMGYtMjI5NjkxOTA=, ActorId: [1:7592676088457212492:2319], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:02.097334Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676095330018924:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:02.097389Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:02.170857Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.008605s 2026-01-07T17:41:02.183203Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009932s 2026-01-07T17:41:02.194944Z node 3 :BS_NODE WAR ... 07T17:41:47.140246Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=Zjk1MjliN2ItYzdmYjllODEtYzlhMTk1OTEtZjliNTAxZGQ=, ActorId: [8:7592676284441308157:2332], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:47.140302Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=Zjk1MjliN2ItYzdmYjllODEtYzlhMTk1OTEtZjliNTAxZGQ=, ActorId: [8:7592676284441308157:2332], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:47.140347Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=Zjk1MjliN2ItYzdmYjllODEtYzlhMTk1OTEtZjliNTAxZGQ=, ActorId: [8:7592676284441308157:2332], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:47.140385Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=Zjk1MjliN2ItYzdmYjllODEtYzlhMTk1OTEtZjliNTAxZGQ=, ActorId: [8:7592676284441308157:2332], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:47.140476Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=Zjk1MjliN2ItYzdmYjllODEtYzlhMTk1OTEtZjliNTAxZGQ=, ActorId: [8:7592676284441308157:2332], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:47.150488Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:292: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275512:2927], ActorId: [8:7592676288736275513:2928], Starting query actor #1 [8:7592676288736275514:2929] 2026-01-07T17:41:47.150550Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:135: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], Bootstrap. 
Database: /Root, IsSystemUser: 0, run create session 2026-01-07T17:41:47.154380Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc= temp_dir_name# e3487c93-43b8-0c85-a81a-a8af1c28a73a trace_id# 2026-01-07T17:41:47.154879Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:47.155186Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:156: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], Successfully created session: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, run query 2026-01-07T17:41:47.155219Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], RunDataQuery with SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, TxId: , text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2026-01-07T17:41:47.155619Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ReadyState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Received request proxy_request_id# 6 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpc_actor# [8:7592676288736275517:2346] database# /Root database_id# /Root pool_id# trace_id# 2026-01-07T17:41:47.156341Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136827741955416 trace_id# 2026-01-07T17:41:47.156413Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:41:47.156579Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Created new KQP executer executer_id# [8:7592676288736275520:2345] is_rollback# false trace_id# 2026-01-07T17:41:47.166019Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, TEvTxResponse 
current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:41:47.166229Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, TxInfo status# Committed kind# ReadOnly total_duration# 9.991 server_duration# 9.896 queries_count# 2 trace_id# 2026-01-07T17:41:47.166355Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:41:47.166436Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:47.166478Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, EndCleanup is_final# false trace_id# 2026-01-07T17:41:47.166546Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ExecuteState, LegacyTraceId: 01kecrst0k41hj13f3xv2jwd2m, Sent query response back to proxy proxy_request_id# 6 proxy_id# [8:7592676262966470626:2268] trace_id# 2026-01-07T17:41:47.167535Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, TxId: 2026-01-07T17:41:47.167648Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, TxId: 2026-01-07T17:41:47.167693Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275513:2928], ActorId: [8:7592676288736275514:2929], Delete session: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc= 2026-01-07T17:41:47.167797Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7592676288736275512:2927], ActorId: [8:7592676288736275513:2928], Got response [8:7592676288736275514:2929] SUCCESS 2026-01-07T17:41:47.168485Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:47.168554Z node 8 :KQP_SESSION INFO: 
{KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:47.168590Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:47.168622Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:47.168718Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NjM4NzFiYTAtMzkwNWQ0NjMtZTViYTk0ZDMtN2IxN2UwMDc=, ActorId: [8:7592676288736275516:2345], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:47.182284Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NTA4NzhkM2EtOWVkMDExZC00NDcwNTY5OS0xYzk5ZWRiYw==, ActorId: [8:7592676284441308072:2330], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:47.182350Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NTA4NzhkM2EtOWVkMDExZC00NDcwNTY5OS0xYzk5ZWRiYw==, ActorId: [8:7592676284441308072:2330], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:47.182382Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NTA4NzhkM2EtOWVkMDExZC00NDcwNTY5OS0xYzk5ZWRiYw==, ActorId: [8:7592676284441308072:2330], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:47.182419Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NTA4NzhkM2EtOWVkMDExZC00NDcwNTY5OS0xYzk5ZWRiYw==, ActorId: [8:7592676284441308072:2330], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:47.182538Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NTA4NzhkM2EtOWVkMDExZC00NDcwNTY5OS0xYzk5ZWRiYw==, ActorId: [8:7592676284441308072:2330], ActorState: unknown state, Session actor destroyed trace_id# |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:41:35.562552Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:35.563697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:35.569367Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:41:35.670026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:35.671424Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:35.678518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:35.678661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:35.678765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:35.680732Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:35.680843Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:36.153524Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:36.272364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:36.272508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:36.273559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:36.273639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:36.324352Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:36.325099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:36.325452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:36.403533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:36.417952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:37.153808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:37.210495Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:37.211589Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:37.548312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:38.610715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1913:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.610869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1924:3237], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.610985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.612102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1929:3242], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.612261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.617652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:39.210950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1927:3240], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:41:39.412906Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2036:3317] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:39.755091Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:41:39.755198Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-07T17:41:39.755433Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:2062:3230] TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-07T17:41:39.755514Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:851} ActorId: [1:2062:3230] TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-07T17:41:39.755756Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710660. Resolved key sets: 1 2026-01-07T17:41:39.755884Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-07T17:41:39.756207Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:41:39.756516Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:2065:3230] 2026-01-07T17:41:39.756564Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710660. 
Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:2065:3230], channels: 0 2026-01-07T17:41:39.756641Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [1:2062:3230] TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:41:39.756680Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [1:2062:3230] TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUtNzkzN2EyMzktODJlODIzYTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-07T17:41:39.756723Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710660. Ctx: { TraceId: 01kecrshng97meddf9thec99r3, Database: , SessionId: ydb://session/3?node_id=1&id=YWI1Y2U0YTYtODk0MzM0NmUt ... : { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CT 1, CA [3:2115:3358], trace_id# 2026-01-07T17:41:50.891707Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitSnapshotState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2115:3358], trace_id# 2026-01-07T17:41:50.891783Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitSnapshotState immediate# true trace_id# 2026-01-07T17:41:50.894640Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2115:3358] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:41:50.894726Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CT 1, CA [3:2115:3358], trace_id# 2026-01-07T17:41:50.894794Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2115:3358], trace_id# 2026-01-07T17:41:50.895610Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1045} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing task TaskId# 1 ComputeActor# [4:2116:2470] trace_id# 2026-01-07T17:41:50.895673Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [4:2116:2470] 2026-01-07T17:41:50.895743Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:908: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task: 1, output channelId: 1, dst task: 2, at actor [3:2115:3358] 2026-01-07T17:41:50.895800Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2115:3358], channels: 1 2026-01-07T17:41:50.895844Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:2116:2470], channels: 1 2026-01-07T17:41:50.896153Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2116:2470] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:41:50.896207Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2115:3358], CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.896250Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2107:3358] TxId: 281474976715663. 
Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:2115:3358], CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.897174Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2116:2470] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 839 Tasks { TaskId: 1 CpuTimeUs: 454 ComputeCpuTimeUs: 14 BuildCpuTimeUs: 440 HostName: "ghrun-z2c6jslqle" NodeId: 4 CreateTimeMs: 1767807710893 CurrentWaitInputTimeUs: 21 UpdateTimeMs: 1767807710894 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:41:50.897237Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2115:3358], CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.897277Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:2115:3358], CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.912476Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2115:3358] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1154 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 838 FinishTimeMs: 1767807710911 InputRows: 3 InputBytes: 13 ComputeCpuTimeUs: 197 BuildCpuTimeUs: 641 HostName: "ghrun-z2c6jslqle" NodeId: 3 CreateTimeMs: 1767807710892 UpdateTimeMs: 1767807710911 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:41:50.912568Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2115:3358] 2026-01-07T17:41:50.912641Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.912688Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:2116:2470], trace_id# 2026-01-07T17:41:50.913761Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2116:2470] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1704 DurationUs: 10000 Tasks { TaskId: 1 CpuTimeUs: 703 FinishTimeMs: 1767807710913 OutputRows: 3 OutputBytes: 13 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ResultRows: 3 ResultBytes: 13 ComputeCpuTimeUs: 263 BuildCpuTimeUs: 440 WaitInputTimeUs: 7609 HostName: "ghrun-z2c6jslqle" NodeId: 4 StartTimeMs: 1767807710903 CreateTimeMs: 1767807710893 UpdateTimeMs: 1767807710913 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:41:50.913821Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:2116:2470] 2026-01-07T17:41:50.913930Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:41:50.914003Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-07T17:41:50.914070Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:2107:3358] TxId: 281474976715663. Ctx: { TraceId: 01kecrsxh257etxg5tq89r2nva, Database: , SessionId: ydb://session/3?node_id=3&id=ODNiMTBmMTctNGJiMzAzYTktN2VkMGY2MDYtNWY2Mjk3N2E=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002858s ReadRows: 3 ReadBytes: 24 RequestUnits# 3 ForceFlag# true trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2026-01-07T17:39:47.611812Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675772933638419:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.611903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:48.117022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:48.124635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:48.125019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:48.262425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:48.380069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:48.472170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:48.631994Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.669500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:48.669521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:48.669526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:48.669620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:48.999370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:52.340197Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:39:52.341524Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675794408475708:2322], Start check tables existence, number paths: 2 
2026-01-07T17:39:52.344598Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=OWI3MDAxNmQtYjczZTQ2NzMtNGQwODY4NTAtNTgzNTgzMTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# OWI3MDAxNmQtYjczZTQ2NzMtNGQwODY4NTAtNTgzNTgzMTI= temp_dir_name# 63f83585-402f-4969-332c-1b8634ae1163 trace_id# 2026-01-07T17:39:52.347444Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=OWI3MDAxNmQtYjczZTQ2NzMtNGQwODY4NTAtNTgzNTgzMTI=, ActorId: [1:7592675794408475727:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:39:52.347850Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675794408475708:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:39:52.347890Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675794408475708:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:39:52.347918Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675794408475708:2322], Successfully finished 2026-01-07T17:39:52.349843Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:39:52.349885Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:39:52.349917Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:39:52.373667Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:39:52.381018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:39:52.385219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:39:52.388066Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:39:52.391906Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:39:52.400353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:39:52.477809Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:39:52.572201Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675794408475788:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:39:52.572348Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675794408475737:2502], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:39:52.582785Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM= temp_dir_name# 70713035-4536-0e98-9b3a-838389374dc5 trace_id# 2026-01-07T17:39:52.583231Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM=, ActorId: [1:7592675794408475796:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:39:52.583423Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM=, ActorId: [1:7592675794408475796:2327], ActorState: ReadyState, LegacyTraceId: 01kecrpa4714rb8p7bzr5k0m6g, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7592675794408475795:2540] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-07T17:39:52.583739Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:39:52.583751Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:39:52.583806Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7592675794408475796:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM= 2026-01-07T17:39:52.583853Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:39:52.583888Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675794408475798:2328], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:39:52.583961Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7592675794408475799:2329], Database: /Root, Start database fetching 2026-01-07T17:39:52.585352Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7592675794408475799:2329], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-07T17:39:52.588668Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675794408475798:2328], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:39:52.588743Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-07T17:39:52.588796Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-07T17:39:52.588813Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-07T17:39:52.592601Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7592675794408475809:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM=, Start pool fetching 2026-01-07T17:39:52.592652Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675794408475810:2331], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:39:52.592977Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675794408475810:2331], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:39:52.593047Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [1:7592675794408475809:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODExZjhiMGItNWYzMTAzNmEtOGQ4NzQ2ZDUtYjEyYjk0NmM=, Pool info successfully resolved 2026-01-07T17:39:52.593315Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, Database ... P_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592676301013180365:2505], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-07T17:41:50.493080Z node 7 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592676301013180365:2505], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:41:50.503308Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592676301013180365:2505], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:41:50.595977Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592676301013180365:2505], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:41:50.599694Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7592676301013180416:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:50.599837Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592676301013180365:2505], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:41:50.604086Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ= temp_dir_name# d830bde4-4dcd-21dc-e603-d79763414bc1 trace_id# 2026-01-07T17:41:50.604262Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:50.604624Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:41:50.604652Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:41:50.604788Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: ReadyState, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [7:7592676301013180422:2542] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-07T17:41:50.604828Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [7:7592676301013180423:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ= 2026-01-07T17:41:50.604879Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592676301013180425:2328], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:41:50.604950Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7592676301013180426:2329], Database: /Root, Start database fetching 2026-01-07T17:41:50.604998Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:41:50.605291Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[7:7592676301013180426:2329], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-07T17:41:50.605375Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-07T17:41:50.605455Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [7:7592676301013180432:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, Start pool fetching 2026-01-07T17:41:50.605496Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592676301013180433:2331], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:41:50.606517Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592676301013180425:2328], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:41:50.606522Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592676301013180433:2331], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:41:50.606595Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [7:7592676301013180432:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, Pool info successfully resolved 2026-01-07T17:41:50.606596Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-07T17:41:50.606620Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-07T17:41:50.606885Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ= 2026-01-07T17:41:50.606886Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7592676301013180439:2332], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-07T17:41:50.606952Z node 7 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:297: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ= 2026-01-07T17:41:50.607154Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: ExecuteState, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id status# PRECONDITION_FAILED issues# { message: "Resource pool sample_pool_id was disabled due to zero concurrent query limit" severity: 1 } trace_id# 2026-01-07T17:41:50.607271Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: 
ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: ExecuteState, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-07T17:41:50.607324Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [7:7592676301013180423:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ= 2026-01-07T17:41:50.607404Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: CleanupState, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, EndCleanup is_final# true trace_id# 2026-01-07T17:41:50.607505Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: CleanupState, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Sent query response back to proxy proxy_request_id# 3 proxy_id# [7:7592676279538343250:2265] trace_id# 2026-01-07T17:41:50.607533Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: unknown state, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:41:50.607651Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=OTgyNWJlYWQtMmNhODJkMDYtMzcxNTQ1NjYtYjIxMDExOTQ=, ActorId: [7:7592676301013180423:2327], ActorState: unknown state, LegacyTraceId: 01kecrsxccfx1v1x11thg7w42f, Session actor destroyed trace_id# 2026-01-07T17:41:50.608107Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7592676301013180439:2332], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2026-01-07T17:41:50.618804Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=7&id=OGVjMTU0YjktNjk2NWNlMjYtMWRhMWI5NmEtYTRmZWE1N2E=, ActorId: [7:7592676301013180358:2326], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:41:50.618875Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=7&id=OGVjMTU0YjktNjk2NWNlMjYtMWRhMWI5NmEtYTRmZWE1N2E=, ActorId: [7:7592676301013180358:2326], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:41:50.618919Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=OGVjMTU0YjktNjk2NWNlMjYtMWRhMWI5NmEtYTRmZWE1N2E=, ActorId: [7:7592676301013180358:2326], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:41:50.618960Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=OGVjMTU0YjktNjk2NWNlMjYtMWRhMWI5NmEtYTRmZWE1N2E=, ActorId: [7:7592676301013180358:2326], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 
2026-01-07T17:41:50.619057Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=OGVjMTU0YjktNjk2NWNlMjYtMWRhMWI5NmEtYTRmZWE1N2E=, ActorId: [7:7592676301013180358:2326], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:41:50.694050Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7592676279538343017:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.694154Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> KqpErrors::ProposeErrorEvWrite [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] Test command err: 2026-01-07T17:41:47.250383Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676289918476619:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:47.250607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:47.267101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:47.513371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:47.513517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:47.537804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:47.553559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:47.601812Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:47.754392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:47.811593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:47.811630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:47.811636Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:47.811721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:48.082583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:48.092838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:48.202502Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-07T17:41:48.254189Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:50.050488Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7592676302803379417:2332] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:41:50.054741Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:50.054783Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-07T17:41:50.067102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:41:50.078483Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:50.078511Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-07T17:41:50.597178Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676302948776622:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.597263Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.610890Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.684232Z node 2 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.685636Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676302948776581:2081] 1767807710596260 != 1767807710596263 2026-01-07T17:41:50.723182Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.723310Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.725432Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.853928Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:50.914291Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:50.914311Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:50.914317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:50.914394Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:51.153329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:51.160835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:51.188688Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-07T17:41:51.601590Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:53.197394Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7592676315833679534:2332] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:41:53.200110Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:53.200124Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-07T17:41:53.206352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:41:53.221471Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-07T17:41:53.221501Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2026-01-07T17:41:50.171995Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676301034350881:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.172061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.383764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.408772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.408891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.422462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.478896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.479715Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676301034350853:2081] 1767807710170532 != 1767807710170535 2026-01-07T17:41:50.523922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:50.523942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:50.523951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:50.524031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:50.667632Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:51.179003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.367711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676309624286356:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.367724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676309624286383:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.367788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.367966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676309624286387:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.368000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.371825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.380689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676309624286386:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:41:52.472998Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676309624286443:2551] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:52.793855Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676309624286460:2364], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:52.794389Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTZkMzZhYjctZDhjZGRiYmQtM2NkOTg5MDEtN2JmMWZiYjE=, ActorId: [1:7592676309624286354:2351], ActorState: ExecuteState, LegacyTraceId: 01kecrsz3d1b82p3wagdy4hrnw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:52.806273Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2026-01-07T17:41:50.435139Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676304253468212:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.435277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.644199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.644316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.681736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.696664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.725372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.728108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676304253468188:2081] 1767807710434265 != 1767807710434268 2026-01-07T17:41:50.779753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config 
is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0033d4/r3tmp/yandexGVjOLX.tmp 2026-01-07T17:41:50.779798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0033d4/r3tmp/yandexGVjOLX.tmp 2026-01-07T17:41:50.779970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0033d4/r3tmp/yandexGVjOLX.tmp 2026-01-07T17:41:50.780048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:50.879288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:51.440804Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.642971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676312843403715:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.642996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676312843403706:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.643083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.643266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676312843403722:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.643312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.646392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.654534Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676312843403721:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-07T17:41:52.755546Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676312843403778:2551] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:52.979032Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676312843403799:2366], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:52.979495Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWNmODkyZDktZDEzMTg0NC0xNTc3NDU1Yi0yMDA1OWZjNQ==, ActorId: [1:7592676312843403689:2351], ActorState: ExecuteState, LegacyTraceId: 01kecrszc18j9rkmtqbpnskbwg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:52.988501Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:54.011790Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676317138371170:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:54.012217Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZGQ0M2NiODYtNWFkZmM3NTMtZDhhMzBhZGQtM2I4MGFkMDk=, ActorId: [1:7592676317138371163:2391], ActorState: ExecuteState, LegacyTraceId: 01kecrt0p738xdg1zkab6g1125, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:54.012675Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |89.0%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestShred::Run3CyclesForTopics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2026-01-07T17:41:49.873702Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676297535765610:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:49.873784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.065563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.077919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.078090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.107920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.147545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.149319Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676297535765584:2081] 1767807709872771 != 1767807709872774 2026-01-07T17:41:50.210007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00353a/r3tmp/yandexTuJRVs.tmp 2026-01-07T17:41:50.210044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00353a/r3tmp/yandexTuJRVs.tmp 2026-01-07T17:41:50.210220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00353a/r3tmp/yandexTuJRVs.tmp 2026-01-07T17:41:50.210336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:50.227006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:31982 2026-01-07T17:41:50.393933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:50.881308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.221972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310420668431:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.222042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310420668440:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.222109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.222898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310420668447:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.223008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.225500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.234484Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676310420668445:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:41:52.471674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:52.472494Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676310420668513:2592] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:52.556976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:52.591747Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676310420668557:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:52.592292Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjM3ZTQ0MTktZWVkNWUwOC1hOGJjY2U1OC04MTU0YmI1OA==, ActorId: [1:7592676310420668429:2324], ActorState: ExecuteState, LegacyTraceId: 01kecrsyyw8mas98h6sfwhycny, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:52.594016Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:52.631243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:35.931889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:36.059645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:36.068800Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:36.071136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:36.071528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:36.071731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:36.073898Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:36.074278Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:36.074523Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:36.074716Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:36.584209Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:36.704073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:36.704221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:36.705069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:36.705153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:36.754592Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:36.755458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:36.755883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:36.863650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:36.936443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:37.580325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:37.655539Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:37.655632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:38.025412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:38.947587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1920:3231], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.947714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1931:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.947813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.948296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1934:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.948428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:38.953177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:39.554963Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1935:3240], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:41:39.784570Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2045:3317] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:40.122065Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:41:40.122193Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-01-07T17:41:40.122265Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-07T17:41:40.122318Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-07T17:41:40.122361Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-07T17:41:40.125243Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-07T17:41:40.134644Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:41:40.134752Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsj01a87q51wz77z3y7ra, Database: , SessionId: ydb://session/3?node_id=1&id=MmVlMDhkMGMtZWVkMmFlMGYtNDk4YTU0ZDQtZTRhMjhlYTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 299.448110s cancel_after# trace_id# 2026-01-07T17:41:40.134818Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsj01a87q51wz77z3y7ra, Database: , SessionId: ydb://session/3?node_id=1&id=MmVlMDhkMGMtZWVkMmFlMGYtNDk4YTU0ZDQtZTRhMjhlYTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-07T17:41:40.134864Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsj01a87q51wz77z3y7ra, Database: , SessionId: ydb://session/3?node_id=1&id=MmVlMDhkMGMtZWVkMmFlMGYtNDk4YTU0ZDQtZTRhMjhlYTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-07T17:41:40.134914Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-07T17:41:40.135536Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsj01a87q51wz77z3y7ra, Database: , SessionId: ydb://session/3?node_id=1&id=MmVlMDhkMGMtZWVkMmFlMGYtNDk4YTU0ZDQtZTRhMjhlYTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-07T17:41:40.135765Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:41:40.135859Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrsj01a87q51wz77z3y7ra, Database: , SessionId: ydb://session/3?node_id=1&id=MmVlMDhkMGMtZWVkMmFlMGYtNDk4YTU0ZDQtZTRhMjhlYTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-07T17:41:40.136190Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:2071:3229] TxId: 281474976710660. C ... false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:41:53.218138Z node 3 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-07T17:41:53.218189Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [3:2327:3464] 2026-01-07T17:41:53.218243Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2327:3464], channels: 0 2026-01-07T17:41:53.218308Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2327:3464], trace_id# 2026-01-07T17:41:53.218378Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2327:3464], trace_id# 2026-01-07T17:41:53.218437Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:41:53.219307Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2327:3464] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:41:53.219379Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2327:3464], trace_id# 2026-01-07T17:41:53.219438Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2327:3464], trace_id# 2026-01-07T17:41:53.220467Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2327:3464] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 579 Tasks { TaskId: 1 CpuTimeUs: 104 FinishTimeMs: 1767807713220 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 85 HostName: "ghrun-z2c6jslqle" NodeId: 3 CreateTimeMs: 1767807713218 UpdateTimeMs: 1767807713220 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:41:53.220530Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2327:3464] 2026-01-07T17:41:53.220606Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [3:2323:3464] trace_id# 2026-01-07T17:41:53.220688Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:2324:3464] TxId: 281474976715683. 
Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000579s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:41:53.235683Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [3:2329:3464], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:2314:3464]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2329:3464]. Ignored this error. 2026-01-07T17:41:53.235820Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:2323:3464], SessionActorId: [3:2314:3464], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 . sessionActorId=[3:2314:3464]. 2026-01-07T17:41:53.236187Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, ActorId: [3:2314:3464], ActorState: ExecuteState, LegacyTraceId: 01kecrszw14xgjbw0rfdpvqa8e, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [3:2324:3464] from: [3:2323:3464] trace_id# 2026-01-07T17:41:53.236390Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1071} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got EvAbortExecution Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-07T17:41:53.236480Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-07T17:41:53.236555Z node 3 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-07T17:41:53.236733Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1349} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: UNAVAILABLE Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } Result { Stats { } } TargetActor# [3:2314:3464] trace_id# 2026-01-07T17:41:53.236790Z node 3 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2823} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shutdown immediately - nothing to wait trace_id# 2026-01-07T17:41:53.236877Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:41:53.236952Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [3:2324:3464] TxId: 281474976715683. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-07T17:41:53.237233Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, ActorId: [3:2314:3464], ActorState: ExecuteState, LegacyTraceId: 01kecrszw14xgjbw0rfdpvqa8e, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } trace_id# 2026-01-07T17:41:53.237481Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-07T17:41:53.237969Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:236} ActorId: [3:2332:3464] TxId: 281474976715684. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Send Rollback to BufferActor buffer_actor_id# [3:2323:3464] trace_id# 2026-01-07T17:41:53.238248Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [3:2332:3464] TxId: 281474976715684. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:41:53.238310Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [3:2332:3464] TxId: 281474976715684. Ctx: { TraceId: 01kecrszw14xgjbw0rfdpvqa8e, Database: , SessionId: ydb://session/3?node_id=3&id=ZWJlNmY1M2QtOWI4ZjA0OTAtZGFjMmEyMC02N2YyNGJjMg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2026-01-07T17:41:50.255747Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676303318532168:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.255881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.450273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.471961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.472036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.518919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.570074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.570913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676303318532144:2081] 1767807710254890 != 1767807710254893 2026-01-07T17:41:50.615458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003515/r3tmp/yandexdDEEwl.tmp 2026-01-07T17:41:50.615488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003515/r3tmp/yandexdDEEwl.tmp 2026-01-07T17:41:50.615653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003515/r3tmp/yandexdDEEwl.tmp 2026-01-07T17:41:50.615760Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:50.736806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions PQClient connected to localhost:26945 2026-01-07T17:41:50.801132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:51.263916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.532011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311908467701:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.532127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311908467691:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.532331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.532794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311908467706:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.532897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.535597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.543407Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676311908467705:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:41:52.620043Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676311908467773:2591] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:52.780288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:52.790885Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676311908467781:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:52.791452Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NWZhZThkMDUtMzcwZjE3YzQtNjZlM2MxOTctYjE0MTcyYjg=, ActorId: [1:7592676311908467689:2324], ActorState: ExecuteState, LegacyTraceId: 01kecrsz8jcb3cmrf4n382m5jr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:52.794083Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:52.878713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:52.944569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2026-01-07T17:41:50.506246Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676301759400106:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.506342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.717558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:50.717654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.736867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.782413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.784765Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676301759400077:2081] 1767807710504746 != 1767807710504749 2026-01-07T17:41:50.808538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.852494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0033d1/r3tmp/yandexcYX42o.tmp 2026-01-07T17:41:50.852529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0033d1/r3tmp/yandexcYX42o.tmp 2026-01-07T17:41:50.852746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0033d1/r3tmp/yandexcYX42o.tmp 2026-01-07T17:41:50.852866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:51.023954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:21594 2026-01-07T17:41:51.071287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:51.512454Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.712223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310349335613:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.712280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310349335631:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.712356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.713020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676310349335639:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.713081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.716901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.727028Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676310349335638:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:41:52.988716Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676310349335705:2590] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:53.016816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:53.112118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:53.114478Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676310349335714:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:53.114915Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWNiN2IwNC02NzAwYTYxZC1kYzBiYWY3MC0xZTUzZjc3MA==, ActorId: [1:7592676310349335605:2321], ActorState: ExecuteState, LegacyTraceId: 01kecrsze6f7rbp81tpctw1vcv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:53.116988Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:53.172616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:38.271878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:38.272007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.272069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2026-01-07T17:41:38.272247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:38.272308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:38.272342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:38.272452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:38.272540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:38.273670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:38.274002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:38.371404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:38.371470Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:38.389593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:38.389993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:38.390204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:38.401300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:38.401717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:38.402579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:38.402873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:38.406206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.406420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:38.407796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:38.407873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:38.408016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:38.408082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:38.408134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:38.408329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:38.560689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.561921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:38.562895Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... .cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 20 ms, next wakeup# 593.980000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-07T17:41:53.978072Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2026-01-07T17:41:53.978895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2026-01-07T17:41:53.979651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:53.979693Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:41:53.979879Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:704:2685], Recipient [2:696:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-07T17:41:53.979913Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:53.979938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:53.979977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:53.980006Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:53.980054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:53.980111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:54.530979Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531041Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531089Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531106Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531140Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender 
[0:0:0], Recipient [2:864:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531157Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:54.531192Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1257:3139], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.531214Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.531261Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:696:2679], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.531276Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.531308Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:864:2816], Recipient [2:864:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.531323Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:54.541706Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:54.541777Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:54.541800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:41:54.541975Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:704:2685], Recipient [2:696:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-07T17:41:54.542001Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:54.542021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:54.542070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:54.542098Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:54.542137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:54.542165Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:55.089617Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:864:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2026-01-07T17:41:55.089679Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:55.089725Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:55.089741Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:55.089780Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:55.089804Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:55.089841Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:696:2679], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.089859Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.089904Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:864:2816], Recipient [2:864:2816]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.089918Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.089951Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1257:3139], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.089967Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:55.100311Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:696:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:55.100377Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:55.100429Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:41:55.100676Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:704:2685], Recipient [2:696:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-07T17:41:55.100698Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:55.100723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:55.100779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:55.100803Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:41:55.100835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.979000s, Timestamp# 1970-01-01T00:00:11.157000Z 2026-01-07T17:41:55.100856Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-07T17:41:55.102629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:55.103059Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:2257:4053], Recipient [2:696:2679]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:55.103099Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:55.103128Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:55.103231Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:681:2670], Recipient [2:696:2679]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:55.103253Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:55.103278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] |89.0%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::StatisticsOnShardsRestart [GOOD] >> TraverseColumnShard::TraverseColumnTable [GOOD] |89.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:37:49.665963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:37:49.666043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.666091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:37:49.666131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:37:49.666181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:37:49.666209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:37:49.666267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:37:49.666337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:37:49.667095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.667343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:37:49.797315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:37:49.797411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:49.798241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:37:49.810827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:37:49.811602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:37:49.811793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:37:49.824031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-07T17:37:49.824378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:37:49.825083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.825407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:37:49.830863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.831047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:37:49.832079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:37:49.832146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:37:49.832316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:37:49.832378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:37:49.832431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:37:49.832589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.839439Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:247:2058] recipient: [1:15:2062] 2026-01-07T17:37:49.951862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:37:49.952089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.952283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:37:49.952330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:37:49.952577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:37:49.952643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:49.955242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.955446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:37:49.955651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.955722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:37:49.955760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:37:49.955794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:37:49.957559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.957610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:37:49.957669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:37:49.959348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.959383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:37:49.959448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.959491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:37:49.969255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:37:49.971318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:37:49.971501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2026-01-07T17:37:49.972287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:37:49.972397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:37:49.972432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.972616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:37:49.972646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:37:49.972755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:37:49.972833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:37:49.974676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 4 2026-01-07T17:41:48.758148Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-07T17:41:48.758183Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-07T17:41:48.758217Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-07T17:41:48.758246Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-01-07T17:41:48.758277Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-07T17:41:48.760899Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.761052Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.761097Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:41:48.761138Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 
72057594046678944, LocalPathId: 127], version: 6 2026-01-07T17:41:48.761181Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-07T17:41:48.765069Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.765222Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.765272Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:41:48.765318Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-07T17:41:48.765422Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-07T17:41:48.768533Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.768637Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.768672Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:41:48.768703Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-07T17:41:48.768739Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-07T17:41:48.769535Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.769645Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-07T17:41:48.769692Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-07T17:41:48.769733Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 
72057594046678944, LocalPathId: 130], version: 2 2026-01-07T17:41:48.769777Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 3 2026-01-07T17:41:48.769862Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-07T17:41:48.774195Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:41:48.774412Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:41:48.777147Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-07T17:41:48.777321Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-07T17:41:48.779165Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-07T17:41:48.779224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-07T17:41:48.781341Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-07T17:41:48.781496Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.781540Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:3905:5621] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-07T17:41:48.783209Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-07T17:41:48.783281Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-07T17:41:48.783375Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-07T17:41:48.783402Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-07T17:41:48.783465Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-07T17:41:48.783492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-07T17:41:48.783575Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-07T17:41:48.783606Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification 
wait txId: 201 2026-01-07T17:41:48.783672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-07T17:41:48.783701Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-07T17:41:48.785971Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-07T17:41:48.786133Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.786177Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:3908:5624] 2026-01-07T17:41:48.786730Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-07T17:41:48.786923Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-07T17:41:48.787010Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.787042Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:3908:5624] 2026-01-07T17:41:48.787181Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-07T17:41:48.787285Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.787323Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:3908:5624] 2026-01-07T17:41:48.787481Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-07T17:41:48.787575Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.787605Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:3908:5624] 2026-01-07T17:41:48.787743Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-07T17:41:48.787772Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:3908:5624] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:41:09.923730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:10.038293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:10.046408Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:10.046834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:10.046989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:10.596675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:10.704960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:10.705106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:10.748354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:10.855574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:11.852458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:11.853314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:11.853363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:11.853397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:11.853949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:11.937743Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:12.715946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:16.954860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:16.965478Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:17.032350Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:17.084846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:17.085057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:17.146939Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:17.153021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:17.519733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:17.519882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:17.524359Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.526696Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.527299Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.528305Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.528956Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.529179Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.529441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.529785Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.529924Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.560232Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:18.104265Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:18.180652Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:18.180751Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:18.272176Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:18.273674Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:18.273910Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:18.273980Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:18.274040Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:18.274103Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:18.274169Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:18.274222Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:18.275174Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:18.299084Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:18.299223Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:18.334461Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:41:18.335299Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:41:18.495824Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:41:18.497775Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:18.523180Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:41:18.523252Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:41:18.523338Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:18.531340Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:18.536036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:18.559045Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:18.559217Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:18.580087Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:18.991210Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:19.017952Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:19.581898Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:19.755609Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:19.755706Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:41:20.725543Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ed schemeshards count = 1 2026-01-07T17:41:52.615620Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:41:52.615763Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:41:52.615826Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5451:4709], StatRequests.size() = 1 2026-01-07T17:41:52.615889Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:41:53.007809Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5441:4699], ActorId: [2:5442:4700], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTljYzFjOTUtNmRkNzcyNzAtZGE1MWJiMzktZTUxZmUwNGU=, TxId: 2026-01-07T17:41:53.007922Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5441:4699], ActorId: [2:5442:4700], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTljYzFjOTUtNmRkNzcyNzAtZGE1MWJiMzktZTUxZmUwNGU=, TxId: 2026-01-07T17:41:53.008341Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5440:4698], ActorId: [2:5441:4699], Got response [2:5442:4700] SUCCESS 2026-01-07T17:41:53.008726Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:41:53.023700Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-07T17:41:53.023777Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:41:53.116475Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:41:53.116568Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:41:53.171309Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5453:4711], schemeshard count = 1 2026-01-07T17:41:54.238684Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:41:54.238783Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:41:54.238826Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:41:54.243279Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:41:54.274531Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:41:54.275205Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:41:54.275300Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:41:54.276606Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2026-01-07T17:41:54.276668Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:63: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2026-01-07T17:41:54.276733Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:41:55.266247Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-07T17:41:55.290791Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:41:55.291024Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:41:55.291593Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5564:4768], server id = [2:5568:4772], tablet id = 72075186224037899, status = OK 2026-01-07T17:41:55.291871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5564:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:55.292118Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5565:4769], server id = [2:5569:4773], tablet id = 72075186224037900, status = OK 2026-01-07T17:41:55.292153Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5565:4769], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:55.302429Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5566:4770], server id = [2:5570:4774], tablet id = 72075186224037901, status = OK 2026-01-07T17:41:55.302515Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5566:4770], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:55.302876Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5567:4771], server id = [2:5571:4775], tablet id = 72075186224037902, status = OK 2026-01-07T17:41:55.302935Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5567:4771], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:55.306515Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:41:55.306834Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5564:4768], server id = [2:5568:4772], tablet id = 72075186224037899 2026-01-07T17:41:55.306866Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:55.307853Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:41:55.308352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5565:4769], server id = [2:5569:4773], tablet id = 72075186224037900 2026-01-07T17:41:55.308377Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:55.308591Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:41:55.308782Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5566:4770], server id = [2:5570:4774], tablet id = 72075186224037901 2026-01-07T17:41:55.308799Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:55.308949Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:41:55.308978Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:41:55.309084Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-07T17:41:55.309195Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:41:55.309463Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5584:4784], ActorId: [2:5585:4785], Starting query actor #1 [2:5586:4786] 2026-01-07T17:41:55.309515Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5585:4785], ActorId: [2:5586:4786], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:41:55.311232Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5567:4771], server id = [2:5571:4775], tablet id = 72075186224037902 2026-01-07T17:41:55.311255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:55.311756Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5585:4785], ActorId: [2:5586:4786], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MmEzODRjMTktZTI2ZTEyNTgtOWRhNDczNDItYTU3NGI4N2U=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:41:55.534389Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5595:4795]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:55.534667Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:55.534716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5595:4795], StatRequests.size() = 1 2026-01-07T17:41:55.760922Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5585:4785], ActorId: [2:5586:4786], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmEzODRjMTktZTI2ZTEyNTgtOWRhNDczNDItYTU3NGI4N2U=, TxId: 2026-01-07T17:41:55.761038Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5585:4785], ActorId: [2:5586:4786], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmEzODRjMTktZTI2ZTEyNTgtOWRhNDczNDItYTU3NGI4N2U=, TxId: 2026-01-07T17:41:55.761450Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5584:4784], ActorId: [2:5585:4785], Got response [2:5586:4786] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:41:55.761879Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5608:4801]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:55.762102Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:41:55.763149Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:41:55.763206Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:41:55.763514Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:41:55.763561Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:41:55.763616Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:41:55.768682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:41.530537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:41.530664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.530711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:41.530745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:41.530783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:41.530812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:41.530863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:41.530937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:41.531838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:41.532107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:41.623311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:41.623368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:41.638985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:41.639363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:41.639552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:41.647702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:41.648086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:41.648894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:41.649224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:41.652976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.653175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:41.654450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:41.654508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:41.654632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:41.654689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:41.654735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:41.654914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:41.800661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.801670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.801787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.801877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.801960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:41.802773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nt [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 100 Completed: false Progress10k: 0 2026-01-07T17:41:56.305936Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:56.305982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:56.306052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:56.306096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:646: TTxCompleteShredBSC Unknown generation#100, Expected gen# 52 at schemestard: 72057594046678944 2026-01-07T17:41:56.306210Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-07T17:41:56.306560Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 0 2026-01-07T17:41:56.306598Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:56.306624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:56.306662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:56.306695Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:56.308273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:56.308341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:56.308406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:56.815113Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.815200Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.815289Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:702:2685], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.815322Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.825751Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.825828Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.825905Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.825929Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:56.825977Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:873:2823], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.826002Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.826068Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1576:3436], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.826090Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:56.888400Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:56.888491Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:56.888532Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-07T17:41:56.888756Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 5000 2026-01-07T17:41:56.888788Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:56.888816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:56.888887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:56.888945Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:56.889007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:56.889069Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:57.266232Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.266295Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.266365Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:702:2685], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.266386Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.276749Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.276814Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.276869Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.276889Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:57.276949Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:873:2823], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.276983Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.277078Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1576:3436], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.277105Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:57.341367Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:57.341458Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:57.341495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-07T17:41:57.341762Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: true Progress10k: 10000 2026-01-07T17:41:57.341803Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:57.341835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:57.341920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:57.341969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 
2026-01-07T17:41:57.342023Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 101, duration# 2 s 2026-01-07T17:41:57.344920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:41:57.345759Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4817:6101], Recipient [2:702:2685]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:57.345833Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:41:57.345879Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:41:57.346061Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:3975:5429], Recipient [2:702:2685]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:41:57.346103Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:41:57.346150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:11.947683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:12.051957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:12.059769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:12.060132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:12.060447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:12.527347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:12.712345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:12.712499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:12.754773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:12.907623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:13.759975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:13.760896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:13.760988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:13.761022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:13.761672Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:13.833023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:14.482818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:19.187830Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:19.195738Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:19.199815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:19.282373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:19.282507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:19.326406Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:19.337531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:19.615018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:19.615146Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:19.616698Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.617722Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.618644Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619185Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619357Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619648Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619775Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.619884Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:19.649564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:19.835253Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:19.871656Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:19.871754Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:19.929932Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:19.930312Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:19.930548Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:19.930607Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:19.930678Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:19.930744Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:19.930792Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:19.930846Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:19.931362Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:19.932443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2453] 2026-01-07T17:41:19.940855Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:19.951695Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Describe result: PathErrorUnknown 2026-01-07T17:41:19.951770Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Creating table 2026-01-07T17:41:19.951891Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:19.954002Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:19.954133Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2139:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:20.009432Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2212:2648] 2026-01-07T17:41:20.009723Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2212:2648], schemeshard id = 72075186224037897 2026-01-07T17:41:20.060193Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2237:2658], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:20.084530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:20.100701Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:20.101059Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:20.120668Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:20.402310Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:20.450207Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:21.503476Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:21.630056Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:21.630157Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2121:2589] Owner: [2:2120:2588]. Column diff is empty, finishing 2026-01-07T17:41:22.331110Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 498Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5460:4721] 2026-01-07T17:41:55.247593Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5459:4720], server id = [2:5460:4721], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:55.247705Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5460:4721], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:55.247764Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:41:55.247897Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:41:55.247958Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5457:4718], StatRequests.size() = 1 2026-01-07T17:41:55.248012Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:41:55.358010Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5447:4708], ActorId: [2:5448:4709], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjdhNTQ0YzItYWZkZjNlM2ItM2Q0ZGQ4ODgtYmFiYWRjN2Y=, TxId: 2026-01-07T17:41:55.358103Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5447:4708], ActorId: [2:5448:4709], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjdhNTQ0YzItYWZkZjNlM2ItM2Q0ZGQ4ODgtYmFiYWRjN2Y=, TxId: 2026-01-07T17:41:55.358561Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5446:4707], ActorId: [2:5447:4708], Got response [2:5448:4709] SUCCESS 2026-01-07T17:41:55.359290Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:41:55.372810Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-07T17:41:55.372869Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] 
TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-07T17:41:55.452053Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:41:55.452161Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:41:55.485566Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5459:4720], schemeshard count = 1 2026-01-07T17:41:56.396016Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:41:56.396080Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:41:56.396118Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:41:56.401189Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:41:56.431667Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:41:56.432481Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:41:56.432595Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:41:56.433852Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:41:56.459911Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:41:56.460195Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:41:56.461235Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5539:4763], server id = [2:5543:4767], tablet id = 72075186224037899, status = OK 2026-01-07T17:41:56.461656Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5539:4763], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:56.461930Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5540:4764], server id = [2:5544:4768], tablet id = 72075186224037900, status = OK 2026-01-07T17:41:56.461998Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5540:4764], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:56.463674Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5541:4765], server id = [2:5546:4770], tablet id = 72075186224037901, status = OK 2026-01-07T17:41:56.463748Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5541:4765], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:56.464002Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5542:4766], server id = [2:5545:4769], tablet id = 72075186224037902, status = OK 2026-01-07T17:41:56.464057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5542:4766], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:41:56.469919Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received 
TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:41:56.470693Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5539:4763], server id = [2:5543:4767], tablet id = 72075186224037899 2026-01-07T17:41:56.470747Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:56.472226Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:41:56.472775Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5540:4764], server id = [2:5544:4768], tablet id = 72075186224037900 2026-01-07T17:41:56.472814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:56.473357Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:41:56.473676Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5542:4766], server id = [2:5545:4769], tablet id = 72075186224037902 2026-01-07T17:41:56.473706Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:56.473813Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:41:56.473863Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:41:56.474049Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:41:56.474232Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:41:56.474666Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5559:4779], ActorId: [2:5560:4780], Starting query actor #1 [2:5561:4781] 2026-01-07T17:41:56.474728Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5560:4780], ActorId: [2:5561:4781], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:41:56.477349Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5541:4765], server id = [2:5546:4770], tablet id = 72075186224037901 2026-01-07T17:41:56.477409Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:41:56.478514Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5560:4780], ActorId: [2:5561:4781], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjNmZjI2ZWEtZDliY2RiNTItOTJjM2YyYmEtNDU4OGIxZDk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:41:56.613824Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5570:4790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:56.614089Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:41:56.614134Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5570:4790], StatRequests.size() = 1 2026-01-07T17:41:56.745629Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5560:4780], ActorId: [2:5561:4781], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjNmZjI2ZWEtZDliY2RiNTItOTJjM2YyYmEtNDU4OGIxZDk=, TxId: 2026-01-07T17:41:56.745701Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5560:4780], ActorId: [2:5561:4781], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjNmZjI2ZWEtZDliY2RiNTItOTJjM2YyYmEtNDU4OGIxZDk=, TxId: 2026-01-07T17:41:56.746011Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5559:4779], ActorId: [2:5560:4780], Got response [2:5561:4781] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:41:56.746427Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5583:4796]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:56.746651Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:41:56.747180Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:41:56.747236Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:41:56.747538Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:41:56.747597Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:41:56.747647Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:41:56.751510Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TPQCDTest::TestDiscoverClusters [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2026-01-07T17:41:50.456865Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676303384305246:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:50.456981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:50.630323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:41:50.637689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:50.637833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:50.703599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:50.736343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:50.737817Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676303384305223:2081] 1767807710455986 != 1767807710455989 2026-01-07T17:41:50.790904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0033da/r3tmp/yandexNYTuHf.tmp 2026-01-07T17:41:50.790936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/ec9d/0033da/r3tmp/yandexNYTuHf.tmp 2026-01-07T17:41:50.791162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0033da/r3tmp/yandexNYTuHf.tmp 2026-01-07T17:41:50.791289Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:17308 2026-01-07T17:41:50.930524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:50.945457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:51.464607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:52.691938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311974240781:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.692093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311974240770:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.692289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.692797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676311974240785:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.692859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:52.696127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:52.704474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676311974240784:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:41:52.916309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:52.917151Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676311974240852:2591] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:52.997586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:53.035672Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676311974240896:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:41:53.036131Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGYzY2YwNzQtNzc5Nzg0MmQtYTRmNzljNmUtNDZhMTg0NjM=, ActorId: [1:7592676311974240768:2324], ActorState: ExecuteState, LegacyTraceId: 01kecrszdj66jxye8cgzyphz06, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:41:53.038168Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:41:53.065141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-07T17:41:55.458045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676303384305246:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:55.458755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:21.423208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:21.513773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:21.521339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:21.521740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:21.521894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:21.932506Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:22.036409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:22.036545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:22.072427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:22.166751Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:22.839973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:22.841497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:22.841556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:22.841592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:22.841870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:22.918574Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:23.463259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:35:26.734953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:26.741509Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:35:26.745124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:26.779936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:35:26.780049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:26.823091Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:26.825247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:27.027560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:27.027677Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:27.029866Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.030615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.031426Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.032275Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.032389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.032677Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.032811Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.033078Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.033216Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:27.049419Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:27.292879Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:27.383572Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:35:27.383695Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:35:27.419971Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:35:27.420263Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:35:27.420422Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:35:27.420485Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:35:27.420531Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:35:27.420568Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:35:27.420605Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:35:27.420639Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:35:27.421012Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:35:27.423660Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:27.423731Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2115:2582], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:27.441168Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2150:2614] 2026-01-07T17:35:27.442133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2150:2614], schemeshard id = 72075186224037897 2026-01-07T17:35:27.506498Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2666] 2026-01-07T17:35:27.509368Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:35:27.523264Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Describe result: PathErrorUnknown 2026-01-07T17:35:27.523328Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Creating table 2026-01-07T17:35:27.523395Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:35:27.527590Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2305:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:27.531042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:27.538643Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:35:27.538788Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Subscribe on create table tx: 281474976720657 2026-01-07T17:35:27.558609Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:35:27.778787Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:35:27.858570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:35:28.243720Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:35:28.371611Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:35:28.371713Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Column diff is empty, finishing 2026-01-07T17:35:28.933171Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 2026-01-07T17:41:05.484355Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:05.484418Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:05.484593Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:05.502907Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:07.294836Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:07.295296Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 42 2026-01-07T17:41:07.295399Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 42 2026-01-07T17:41:11.691528Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:12.841959Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:12.842035Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:12.842267Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:12.866379Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:14.694781Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:14.695136Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 43 2026-01-07T17:41:14.695334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: 
[72075186224037894] EvPropagateStatisticsResponse, cookie: 43 2026-01-07T17:41:18.951260Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:20.046766Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:20.046840Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:20.047025Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:20.062709Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:21.958102Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:21.958462Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 44 2026-01-07T17:41:21.959505Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 44 2026-01-07T17:41:26.175214Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:27.290814Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:27.290895Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:27.291092Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:27.307124Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:29.071732Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:29.072297Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 45 2026-01-07T17:41:29.073845Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 45 2026-01-07T17:41:33.483642Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:41:33.483723Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:41:33.483757Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:41:33.483789Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:41:33.522013Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:34.517587Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:34.517661Z node 2 
:STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:34.517908Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:34.531552Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:36.225573Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:36.226044Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 46 2026-01-07T17:41:36.226488Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 46 2026-01-07T17:41:40.137941Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:41.100063Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:41.100148Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:41.100371Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:41.114106Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:42.611387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:42.611857Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 47 2026-01-07T17:41:42.612229Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 47 2026-01-07T17:41:46.333790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:47.439094Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:47.439154Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:47.439352Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:47.452788Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:49.093503Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:49.093874Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 48 2026-01-07T17:41:49.094805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 48 2026-01-07T17:41:52.791834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:41:53.737166Z 
node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:41:53.737233Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:41:53.737394Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-07T17:41:53.750087Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:55.326284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:41:55.326697Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 49 2026-01-07T17:41:55.327257Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 49 2026-01-07T17:41:55.483928Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16405:9772]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:55.487248Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:41:55.487322Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:16405:9772], StatRequests.size() = 1 2026-01-07T17:41:55.490425Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:16421:9776]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:55.493211Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-07T17:41:55.493278Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:16421:9776], StatRequests.size() = 1 2026-01-07T17:41:55.496832Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:16437:9780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:55.499944Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-07T17:41:55.500017Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 6, ReplyToActorId = [2:16437:9780], StatRequests.size() = 1 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink >> KqpCost::CTASWithRetry+isOlap >> KqpCost::WriteRowInsertFails+isSink-isOlap >> KqpCost::IndexLookupJoin+StreamLookupJoin >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpCost::ScanQueryRangeFullScan+SourceRead >> KqpCost::OlapWriteRow >> 
KqpCost::OlapRangeFullScan >> KqpCost::ScanScriptingRangeFullScan+SourceRead |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink >> KqpCost::Range >> KqpCost::OlapRange >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink >> KqpCost::WriteRow+isSink+isOlap [GOOD] >> KqpCost::IndexLookup+useSink >> KqpCost::WriteRow+isSink-isOlap >> KqpCost::VectorIndexLookup-useSink |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2026-01-07T17:39:26.485289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:39:26.485348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:26.485822Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:39:26.499866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:39:26.500235Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:39:26.500509Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:39:26.511416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:39:26.557750Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:39:26.558403Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:39:26.560208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:39:26.560295Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:39:26.560365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:39:26.560762Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:39:26.560992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:39:26.561063Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:39:26.641115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:39:26.682841Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:39:26.683044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:39:26.683144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:39:26.683206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:39:26.683250Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:39:26.683289Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.683460Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.683525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.683821Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:39:26.683947Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:39:26.684083Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.684128Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:39:26.684176Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:39:26.684212Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:39:26.684280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:39:26.684322Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:39:26.684411Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:39:26.684514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.684554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.684621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:39:26.691550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender 
[1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:39:26.691621Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:39:26.691764Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:39:26.691926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:39:26.691999Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:39:26.692052Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:39:26.692109Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:39:26.692152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:39:26.692187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:39:26.692220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.692515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:39:26.692569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:39:26.692617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:39:26.692671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.692743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:39:26.692772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:39:26.692807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:39:26.692842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.692875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:39:26.705037Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:39:26.705109Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:39:26.705149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:39:26.705210Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose 
transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:39:26.705282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:39:26.705811Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.705880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:39:26.705936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:39:26.706056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:39:26.706084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:39:26.706188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:39:26.706227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:39:26.706265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:39:26.706299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:39:26.709518Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:39:26.709582Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:39:26.709742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.709774Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:39:26.709819Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:39:26.709853Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:39:26.709888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:39:26.709932Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:39:26.709970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
NoMoreRestarts 2026-01-07T17:41:56.139629Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:538] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:41:56.139660Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:538] at 9437184 to execution unit CompleteOperation 2026-01-07T17:41:56.139693Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:538] at 9437184 on unit CompleteOperation 2026-01-07T17:41:56.139936Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:538] at 9437184 is DelayComplete 2026-01-07T17:41:56.139977Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:538] at 9437184 executing on unit CompleteOperation 2026-01-07T17:41:56.140012Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:538] at 9437184 to execution unit CompletedOperations 2026-01-07T17:41:56.140044Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:538] at 9437184 on unit CompletedOperations 2026-01-07T17:41:56.140087Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:538] at 9437184 is Executed 2026-01-07T17:41:56.140116Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:538] at 9437184 executing on unit CompletedOperations 2026-01-07T17:41:56.140148Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:538] at 9437184 has finished 2026-01-07T17:41:56.140183Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:41:56.140217Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:539] at 9437184 for BuildAndWaitDependencies 2026-01-07T17:41:56.140526Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [10:238:2230], Recipient [10:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:41:56.140580Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:41:56.140627Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:41:56.140659Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:41:56.140686Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:539] at 9437184 2026-01-07T17:41:56.140707Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:41:56.140733Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.140761Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:41:56.140779Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit BuildDataTxOutRS 2026-01-07T17:41:56.140801Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit BuildDataTxOutRS 2026-01-07T17:41:56.141548Z 
node 10 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 539 at 9437184 restored its data 2026-01-07T17:41:56.141608Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.141628Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit BuildDataTxOutRS 2026-01-07T17:41:56.141654Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit StoreAndSendOutRS 2026-01-07T17:41:56.141681Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:41:56.141712Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.141736Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit StoreAndSendOutRS 2026-01-07T17:41:56.141758Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit PrepareDataTxInRS 2026-01-07T17:41:56.141780Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit PrepareDataTxInRS 2026-01-07T17:41:56.141810Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.141830Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit PrepareDataTxInRS 2026-01-07T17:41:56.141853Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit LoadAndWaitInRS 2026-01-07T17:41:56.141879Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit LoadAndWaitInRS 2026-01-07T17:41:56.141904Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.141927Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit LoadAndWaitInRS 2026-01-07T17:41:56.141949Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:41:56.141972Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit BlockFailPoint 2026-01-07T17:41:56.142006Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.142028Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:41:56.142051Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:41:56.142075Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit ExecuteDataTx 2026-01-07T17:41:56.142648Z node 10 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:539] at tablet 9437184 with status COMPLETE 2026-01-07T17:41:56.142699Z node 10 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:539] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, 
NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 32, SelectRangeBytes: 256, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:41:56.142743Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.142761Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:41:56.142780Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit CompleteOperation 2026-01-07T17:41:56.142801Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit CompleteOperation 2026-01-07T17:41:56.142957Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is DelayComplete 2026-01-07T17:41:56.142986Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit CompleteOperation 2026-01-07T17:41:56.143009Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:539] at 9437184 to execution unit CompletedOperations 2026-01-07T17:41:56.143031Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:539] at 9437184 on unit CompletedOperations 2026-01-07T17:41:56.143057Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:539] at 9437184 is Executed 2026-01-07T17:41:56.143073Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:539] at 9437184 executing on unit CompletedOperations 2026-01-07T17:41:56.143092Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:539] at 9437184 has finished 2026-01-07T17:41:56.143123Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:41:56.143186Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:41:56.143233Z node 10 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:41:56.143267Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:41:56.164148Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:56.164208Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2026-01-07T17:41:56.164262Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-07T17:41:56.164321Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:41:56.164353Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:41:56.164562Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:41:56.164583Z node 10 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2026-01-07T17:41:56.164614Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:41:56.164654Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:41:56.164960Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:238:2230], Recipient [10:346:2313]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:41:56.165015Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:41:56.165061Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 25 18 31 31 31 30 31 31 30 30 23 30 25 23 22 16 8 26 26 15 16 26 2 26 26 26 23 24 26 26 - - actual 25 18 31 31 31 30 31 31 30 30 23 30 25 23 22 16 8 26 26 15 16 26 2 26 26 26 23 24 26 26 - - interm 25 18 24 24 30 30 20 29 30 30 23 30 25 23 22 16 8 26 26 15 16 26 2 26 26 26 23 24 26 26 - - |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpCost::IndexLookup-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:41:42.071753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:41:42.071859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:42.071899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:41:42.071939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:41:42.071975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:41:42.072019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:41:42.072107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:41:42.072241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:41:42.073378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:41:42.073685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:41:42.160155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:41:42.160214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:42.175164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:41:42.175469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:41:42.175671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:41:42.182733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:41:42.183105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:41:42.183870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:41:42.184141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:41:42.187165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:42.187357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:41:42.188675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:41:42.188738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:41:42.188870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:41:42.188921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:41:42.188986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:41:42.189157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:41:42.354428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.359951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.360014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.360078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.360219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.360327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:41:42.360421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2026-01-07T17:41:59.343827Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 83 ms, next wakeup# 593.917000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-07T17:41:59.343883Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2026-01-07T17:41:59.345275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-07T17:41:59.345318Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:41:59.345520Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-07T17:41:59.345555Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:59.345585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:59.345636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:59.345664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-07T17:41:59.345709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:59.345754Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:41:59.870028Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870091Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870144Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870165Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870210Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870239Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:41:59.870281Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:702:2685], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.870303Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.870372Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:873:2823], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.870401Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.870447Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1576:3436], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.870469Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:41:59.901832Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:59.901916Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:41:59.901947Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:41:59.902158Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-07T17:41:59.902192Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:41:59.902220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:41:59.902291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:41:59.902334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-07T17:41:59.902390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-07T17:41:59.902435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-07T17:42:00.393919Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394007Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394090Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1576:3436]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394119Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394173Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394211Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:00.394272Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1576:3436], Recipient [2:1576:3436]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.394303Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.394377Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:702:2685], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.394404Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.394470Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:873:2823], Recipient [2:873:2823]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.394497Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:00.425813Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:702:2685]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:42:00.425895Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-07T17:42:00.425927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-07T17:42:00.426169Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:709:2690], Recipient [2:702:2685]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-07T17:42:00.426203Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-07T17:42:00.426232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-07T17:42:00.426308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-07T17:42:00.426338Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-07T17:42:00.426382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.916000s, Timestamp# 1970-01-01T00:00:11.222000Z 2026-01-07T17:42:00.426452Z node 
2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-07T17:42:00.428599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-07T17:42:00.429237Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4852:6136], Recipient [2:702:2685]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:42:00.429295Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:42:00.429333Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:42:00.429481Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:687:2676], Recipient [2:702:2685]: NKikimrScheme.TEvShredInfoRequest 2026-01-07T17:42:00.429511Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-07T17:42:00.429552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry-isOlap |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] >> KqpCost::CTAS-isOlap >> KqpCost::OlapPointLookup >> KqpCost::CTAS+isOlap >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::ScanScriptingRangeFullScan-SourceRead |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan >> KqpCost::VectorIndexLookup+useSink |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId >> AnalyzeDatashard::DropTableNavigateError |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} 
ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TraverseColumnShard::TraverseColumnTableRebootColumnshard |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpCost::WriteRow+isSink-isOlap [GOOD] >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> KqpCost::Range [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] >> KqpCost::PointLookup [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2026-01-07T17:40:57.234989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676073041265642:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:57.235049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:57.448897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:57.503410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:57.503493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:57.543722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:57.572450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:57.662920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:57.662948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:57.662966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:57.663269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-07T17:40:57.682299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:57.868338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:58.243083Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:00.030171Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:41:00.033290Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:41:00.033331Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:41:00.033453Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676085926168782:2327], Start check tables existence, number paths: 2 2026-01-07T17:41:00.033767Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:41:00.033808Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=M2FmZTdlNTEtOGZmOGE3NWEtNmQ3MmIxZWYtYzQxNmExNzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# M2FmZTdlNTEtOGZmOGE3NWEtNmQ3MmIxZWYtYzQxNmExNzY= temp_dir_name# df2d963e-4f40-02a9-7615-9eb44f6777a8 trace_id# 2026-01-07T17:41:00.033954Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=M2FmZTdlNTEtOGZmOGE3NWEtNmQ3MmIxZWYtYzQxNmExNzY=, ActorId: [1:7592676085926168793:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:00.034513Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676085926168782:2327], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:41:00.034560Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676085926168782:2327], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:41:00.034588Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676085926168782:2327], Successfully finished 2026-01-07T17:41:00.034642Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:41:00.072444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:00.085862Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592676088788552604:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:00.085952Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:00.097124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:00.097197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:00.099820Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:41:00.100808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:00.101231Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-07T17:41:00.130411Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130550Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130608Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130713Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130815Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130896Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.130972Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.131060Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.131117Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.204950Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:00.205064Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:00.208650Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:00.285041Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2026-01-07T17:41:00.286062Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:00.426806Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:00.426827Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:00.426832Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:00.426903Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:00.469532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:00.484999Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676087101224343:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:41:00.485136Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:41:00.495445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:00.495566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:00.498593Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:00.725832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:00.736610Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-07T17:41:00.838271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:00.838429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:00.842398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:00.864418Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.864608Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.864705Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:00.864820Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTabl ... 
# 2026-01-07T17:42:07.498559Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691427:2449], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.498645Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.502120Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: ExecuteState, LegacyTraceId: 01kecrtdvk2jkh1tcv666fvxz2, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [10:7592676375652691378:2326] workload_service_cleanup# false trace_id# 2026-01-07T17:42:07.505563Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: CleanupState, LegacyTraceId: 01kecrtdvk2jkh1tcv666fvxz2, EndCleanup is_final# false trace_id# 2026-01-07T17:42:07.505645Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: CleanupState, LegacyTraceId: 01kecrtdvk2jkh1tcv666fvxz2, Sent query response back to proxy proxy_request_id# 17 proxy_id# [10:7592676336997984556:2265] trace_id# 2026-01-07T17:42:07.510868Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE= temp_dir_name# 805bd511-455f-3fcb-0dd7-1b810e7e41fa trace_id# 2026-01-07T17:42:07.511018Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:42:07.511397Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-07T17:42:07.511442Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-07T17:42:07.511478Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691444:2451], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-07T17:42:07.511489Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: ReadyState, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Received request proxy_request_id# 19 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [10:7592676375652691441:2798] database# Root database_id# /Root pool_id# my_pool trace_id# 2026-01-07T17:42:07.511534Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [10:7592676375652691442:2450], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE= 2026-01-07T17:42:07.511602Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [10:7592676375652691445:2452], DatabaseId: 
/Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, Start pool fetching 2026-01-07T17:42:07.511642Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691446:2453], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-07T17:42:07.511798Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691444:2451], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.511890Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.511912Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691446:2453], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.511963Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-07T17:42:07.512022Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691449:2454], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-07T17:42:07.512042Z node 10 :KQP_WORKLOAD_SERVICE ERROR: scheme_actors.cpp:56: [WorkloadService] [TPoolResolverActor] ActorId: [10:7592676375652691445:2452], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.512162Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:114: [WorkloadService] [TPoolResolverActor] ActorId: [10:7592676375652691445:2452], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-07T17:42:07.512238Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592676375652691449:2454], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.512362Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:553: [WorkloadService] [Service] Reply continue error NOT_FOUND to [10:7592676375652691442:2450]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-07T17:42:07.512436Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-07T17:42:07.512613Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: ExecuteState, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool status# NOT_FOUND issues# { message: "Failed to resolve pool id my_pool" severity: 1 issues { message: "Resource pool my_pool not found or you don\'t have access permissions" severity: 1 } } trace_id# 2026-01-07T17:42:07.512780Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: ExecuteState, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-07T17:42:07.512813Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [10:7592676375652691442:2450], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE= 2026-01-07T17:42:07.512880Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: CleanupState, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, EndCleanup is_final# true trace_id# 2026-01-07T17:42:07.513028Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: CleanupState, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Sent query response back to proxy proxy_request_id# 19 proxy_id# [10:7592676336997984556:2265] trace_id# 2026-01-07T17:42:07.513079Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: unknown state, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:42:07.513200Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=Nzk1NjYwN2QtNzA2Mjc2YjEtNWUyMTQ0YjgtZjU0YWFiMTE=, ActorId: [10:7592676375652691442:2450], ActorState: unknown state, LegacyTraceId: 01kecrtdwqc11pgx1yjr85na4x, Session actor destroyed trace_id# 2026-01-07T17:42:07.524081Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:42:07.524159Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:42:07.524196Z node 10 :KQP_SESSION 
DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:42:07.524239Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:42:07.524335Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=YTQzN2FjOGYtMTk0MTc1OTItZWI4ZTQ2ZmEtOGE2MmE4YTU=, ActorId: [10:7592676354177854360:2326], ActorState: unknown state, Session actor destroyed trace_id# >> KqpCost::AAARangeFullScan [GOOD] >> KqpCost::CTAS-isOlap [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] >> KqpCost::IndexLookup-useSink [GOOD] >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> KqpCost::IndexLookup+useSink [GOOD] >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> KqpCost::WriteRow-isSink-isOlap [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> TFlatTest::LargeProxyReply |89.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::OlapRangeFullScan [GOOD] >> KqpCost::OlapPointLookup [GOOD] >> KqpCost::OlapRange [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 64487, MsgBus: 25575 2026-01-07T17:42:00.682784Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676347015821601:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.682812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.261046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.465793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.750442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.779994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676347015821568:2081] 1767807720669407 != 1767807720669410 2026-01-07T17:42:01.780170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-07T17:42:01.787612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.805824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.809465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:02.258536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.347253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.347293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.347303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347401Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.546913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.578780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.745105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.391619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.788064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.889110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.255665Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368490659942:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.255769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.256084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368490659952:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.256142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.685017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676347015821601:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.685077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.792138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.879246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.918906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.954206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.013318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.061959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.140826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.353226Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377080595423:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.353283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.353525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377080595428:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.353567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.353624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377080595429:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.429729Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676377080595432:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.543964Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676377080595483:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:11.031737Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807731032, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24233, MsgBus: 4722 2026-01-07T17:42:03.179558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676356229598918:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.179934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.248214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:03.751254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.751665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:03.753236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:03.779098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676356229598699:2081] 1767807723128464 != 1767807723128467 2026-01-07T17:42:03.787133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:03.795502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:04.049458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:04.049485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:04.049494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:04.049585Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:04.077898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.173816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:05.036802Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:05.043189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:05.111578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.250228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.399128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.468076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.261503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373409469784:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.261598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.261941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373409469794:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.262005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.588697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.623010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.649237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.674581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.698937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.729438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.789563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.835508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.904122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373409470668:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.904187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.904202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373409470673:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.904342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373409470675:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.904402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.907715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.916119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676373409470676:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:08.021095Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676377704438024:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.175843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676356229598918:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.175957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10581, MsgBus: 21139 2026-01-07T17:42:00.682750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346607867893:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.682794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.744578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.217007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.430373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.465517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.485016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.553421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.749375Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346607867862:2081] 1767807720670429 != 1767807720670432 2026-01-07T17:42:01.759473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.770097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.827258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:42:01.867947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.347153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.347176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.347184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347351Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.545444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.586505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.748433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.326588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.559556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.825938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.219832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368082706238:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.219963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.220274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368082706248:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.220332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.683265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346607867893:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.683479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.885521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.930444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.977357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.028764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.079894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.146606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351482Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376672641722:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376672641728:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376672641727:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.423163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376672641731:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.543645Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376672641782:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.332327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 7436, MsgBus: 18355 2026-01-07T17:42:00.683921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676344360558465:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.204259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.462766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.481260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.621074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676344360558434:2081] 1767807720666486 != 1767807720666489 2026-01-07T17:42:01.645558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.647338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.713157Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.746959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.113612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.346657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.346684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.346691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347187Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.543314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.585502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.749839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.301218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.580224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.792240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.066802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365835396800:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.092078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365835396810:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.092231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.675642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676344360558465:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.675697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.788901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.889900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.930952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.014336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.061627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.141412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.222672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.352260Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374425332296:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374425332301:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374425332302:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.416068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676374425332305:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540582Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676374425332356:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.270666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.293813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.317011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 6459, MsgBus: 9082 2026-01-07T17:42:00.683129Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346450507892:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.357762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.475090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.480703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.485230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.766036Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.786590Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346450507861:2081] 1767807720661093 != 1767807720661096 2026-01-07T17:42:01.790515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.797062Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.797154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.313253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.330297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.330317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.330323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.330411Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.540807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.578215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.751963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.327045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.762807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.894488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.239767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367925346227:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.239983Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.240383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367925346237:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.240445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.684622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346450507892:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.684689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.788704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.867387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.905697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.950023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.987783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.033963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.086099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.168153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.352099Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376515281713:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376515281719:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376515281718:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.415266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376515281722:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540089Z node 1 :TX_PROXY ERROR: schemereq.cp ... r on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3540 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 179 } compilation { duration_us: 74978 cpu_time_us: 69848 } process_cpu_time_us: 733 total_duration_us: 80125 total_cpu_time_us: 70760 2026-01-07T17:42:11.359657Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=6; 2026-01-07T17:42:11.359952Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676393695151634:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7592676389400183989:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7592676393695151634:2542].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:42:11.360003Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676393695151628:2542], SessionActorId: [1:7592676389400183989:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7592676389400183989:2542]. 2026-01-07T17:42:11.360173Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthjsfxentjc4ynyw5whc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7592676393695151629:2542] from: [1:7592676393695151628:2542] trace_id# 2026-01-07T17:42:11.360252Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676393695151629:2542] TxId: 281474976710687. Ctx: { TraceId: 01kecrthjsfxentjc4ynyw5whc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:42:11.360444Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthjsfxentjc4ynyw5whc, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3745 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 469 } compilation { duration_us: 64809 cpu_time_us: 59889 } process_cpu_time_us: 834 total_duration_us: 70251 total_cpu_time_us: 61192 2026-01-07T17:42:11.451099Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=7; 2026-01-07T17:42:11.451324Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676393695151651:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7592676389400183989:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7592676393695151651:2542].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:42:11.451418Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676393695151645:2542], SessionActorId: [1:7592676389400183989:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7592676389400183989:2542]. 2026-01-07T17:42:11.451623Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthn6b858qtkkf8qmdx2k, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7592676393695151646:2542] from: [1:7592676393695151645:2542] trace_id# 2026-01-07T17:42:11.451702Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676393695151646:2542] TxId: 281474976710689. Ctx: { TraceId: 01kecrthn6b858qtkkf8qmdx2k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:42:11.451930Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthn6b858qtkkf8qmdx2k, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 2837 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 350 } compilation { duration_us: 80715 cpu_time_us: 66973 } process_cpu_time_us: 611 total_duration_us: 85149 total_cpu_time_us: 67934 2026-01-07T17:42:11.526010Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=8; 2026-01-07T17:42:11.526239Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676393695151668:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7592676389400183989:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7592676393695151668:2542].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:42:11.526281Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676393695151662:2542], SessionActorId: [1:7592676389400183989:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7592676389400183989:2542]. 2026-01-07T17:42:11.526414Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthr124y8a0jq5kwdw7qj, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7592676393695151663:2542] from: [1:7592676393695151662:2542] trace_id# 2026-01-07T17:42:11.526491Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676393695151663:2542] TxId: 281474976710691. Ctx: { TraceId: 01kecrthr124y8a0jq5kwdw7qj, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:42:11.526692Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthr124y8a0jq5kwdw7qj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 2673 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 2 bytes: 40 } partitions_count: 1 } cpu_time_us: 345 } compilation { duration_us: 65191 cpu_time_us: 61253 } process_cpu_time_us: 635 total_duration_us: 69219 total_cpu_time_us: 62233 2026-01-07T17:42:11.607972Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=9; 2026-01-07T17:42:11.608241Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676393695151685:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7592676389400183989:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7592676393695151685:2542].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:42:11.608325Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676393695151679:2542], SessionActorId: [1:7592676389400183989:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7592676389400183989:2542]. 2026-01-07T17:42:11.608514Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthtq11z744yadwt6pw8p, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7592676393695151680:2542] from: [1:7592676393695151679:2542] trace_id# 2026-01-07T17:42:11.608595Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676393695151680:2542] TxId: 281474976710693. Ctx: { TraceId: 01kecrthtq11z744yadwt6pw8p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:42:11.608849Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGM4ZTExOTMtZTJhY2FhNTUtOTFkYWE0YzktZGIxMWM0NDY=, ActorId: [1:7592676389400183989:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrthtq11z744yadwt6pw8p, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3379 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 3 bytes: 60 } partitions_count: 1 } cpu_time_us: 387 } compilation { duration_us: 59833 cpu_time_us: 54603 } process_cpu_time_us: 712 total_duration_us: 65055 total_cpu_time_us: 55702 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 17185, MsgBus: 13011 2026-01-07T17:42:05.197329Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676366364916866:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.197553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:05.432525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:05.432646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:05.486758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:05.507878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:05.512801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676366364916752:2081] 1767807725185445 != 1767807725185448 2026-01-07T17:42:05.513041Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:05.597532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:05.597559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:05.597566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:05.597643Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:05.773362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:06.018306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:06.052273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.167925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.254632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:06.291512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.345783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.197613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676379249820515:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.197726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.198052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676379249820525:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.198098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.499236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.535596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.564178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.591149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.619927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.646134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.697429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.738891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.814842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676379249821395:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.814929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.814994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676379249821400:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.815151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676379249821402:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.815195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.818564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:08.829412Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676379249821403:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:08.897178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676379249821455:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.195311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676366364916866:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:10.196528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Reverse":false,"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Test","ReadColumns":["Group (-∞, + ... :1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":20,"Max":20,"Min":20,"History":[12,20]},"WaitTimeUs":{"Count":1,"Sum":10568,"Max":10568,"Min":10568,"History":[12,10568]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":708972,"CpuTimeUs":696286},"ProcessCpuTimeUs":306,"TotalDurationUs":737230,"ResourcePoolId":"default","QueuedTimeUs":326},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Size":"0","Reverse":false,"Name":"TableFullScan","E-Rows":"0","Table":"Test","ReadColumns":["Group (-∞, +∞)","Name (-∞, +∞)","Amount","Comment"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Operators":[{"A-SelfCpu":1.202,"A-Cpu":1.202,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"A-SelfCpu":1.05,"A-Cpu":2.252,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 15310 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 2252 affected_shards: 1 } compilation { duration_us: 708972 cpu_time_us: 696286 } process_cpu_time_us: 306 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node 
Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}},{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[11,20]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":10568,\"Max\":10568,\"Min\":10568,\"History\":[11,10568]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[11,1048576]},\"Introspections\":[\"1 tasks default for source scan\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":45,\"Max\":45,\"Min\":45,\"History\":[11,45]},\"Tasks\":1,\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1767807730962,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1202,\"Max\":1202,\"Min\":1202,\"History\":[11,1202]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[11,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[11,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":10776,\"Max\":10776,\"Min\":10776,\"History\":[11,10776]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":11}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[12,20]}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[12,1048576]},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"InputBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"Tasks\":1,\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":0,\"BaseTimeMs\":1767807730962,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1050,\"Max\":1050,\"Min\":1050,\"History\":[12,1050]},\"UpdateTimeMs\":12,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[12,20]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[12,20]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":10568,\"Max\":10568,\"Min\":10568,\"History\":[12,10568]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":708972,\"CpuTimeUs\":696286},\"ProcessCpuTimeUs\":306,\"TotalDurationUs\":737230,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":326},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-SelfCpu\":1.202,\"A-Cpu\":1.202,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-SelfCpu\":1.05,\"A-Cpu\":2.252,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:45\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'560) \'(\'\"_id\" \'\"1c2bcf6-ad60c837-2733cfc1-cab16801\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) 
(lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'573) \'(\'\"_id\" \'\"82fea575-3e818d02-8a470f6d-d210f31d\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 737230 total_cpu_time_us: 698844 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":45},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767807730\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"640ab37-39d14a93-42c85b7-e0feaf2c\",\"version\":\"1.0\"}" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 4993, MsgBus: 4432 2026-01-07T17:42:00.683081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346811377980:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.743498Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.287292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.462004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.481110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.760287Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.774230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.777373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.777480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346811377947:2081] 1767807720671282 != 1767807720671285 2026-01-07T17:42:01.777569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:02.155839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.349745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.349773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.349778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.349934Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.552438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.580065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.753898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.353207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.732619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.818555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.119271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368286216305:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.119406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.119805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368286216315:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.119905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.689344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346811377980:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.689424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.790845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.883653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.931306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.974192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.011532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.044172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.085080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351635Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376876151785:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376876151791:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376876151790:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351983Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.417386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:42:07.417879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376876151794:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.544809Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376876151845:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.290875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.314350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.339035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 2168, MsgBus: 6709 2026-01-07T17:42:00.681867Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346766190584:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.681935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.017420Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.269043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.459918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.460058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.485908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.836656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.855220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.861102Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346766190552:2081] 1767807720666470 != 1767807720666473 2026-01-07T17:42:01.864236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.864346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.868896Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:42:01.868950Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:42:02.353259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.353281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.353289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.353455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:02.517966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.550792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.580113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.757321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.368489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.768973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.912277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.335820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368241028914:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.335914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.336374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368241028924:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.336415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.682263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346766190584:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.682357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.796707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.880360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.931715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.014326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.051204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.108316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.156974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.225606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351852Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376830964405:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376830964410:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376830964411:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.419311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376830964414:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.543837Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376830964465:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.332042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpCost::OlapWriteRow [GOOD] >> KqpCost::CTASWithRetry-isOlap [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> KqpCost::CTAS+isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 24946, MsgBus: 18017 2026-01-07T17:42:03.501132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676360105441112:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.502378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.555389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:03.865565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.865656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:03.979552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:04.005357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:04.011726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:04.307027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.323439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:04.323458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:04.323464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:42:04.323554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:04.513301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:05.177731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:05.186035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:05.240129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.412330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.556217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.618506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.311522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377285312114:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.311611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.311866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377285312124:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.311924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.619142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.645938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.672117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.702175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.725520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.754243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.813887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.852727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.914561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377285312992:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.914629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.914671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377285312997:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.914804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377285312999:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.914843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.917433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.925051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676377285313001:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.996554Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676377285313052:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.501077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676360105441112:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.501189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:11.089407Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807731032, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28735, MsgBus: 63147 2026-01-07T17:42:03.406889Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676356900639003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.417292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.710356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:03.742901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.743022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:03.859392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:03.867779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:03.881109Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676356900638898:2081] 1767807723389678 != 1767807723389681 2026-01-07T17:42:03.961040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.100796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:04.100815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:04.100819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:04.104149Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:04.414745Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:04.744414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:04.755673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:04.811920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.990738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.146779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.213497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.093884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374080509954:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.094055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.094613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374080509964:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.094667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.444974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.470855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.498793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.522155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.547345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.578564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.611544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.653628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.717244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374080510835:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.717340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.717558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374080510840:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.717562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374080510841:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.717607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.720287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.728518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676374080510844:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.812250Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676374080510895:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.402298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676356900639003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.402398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:10.418141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.645692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 78494 table_access { name: "/Root/.tmp/sessions/79ac9abf-42f9-f746-c223-a1ab85b52a97/Root/TestTable2_6343d6be-478e-c2ef-e8f8-5f92a7d8f8d5" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 37047 affected_shards: 1 } compilation { duration_us: 10916 cpu_time_us: 7257 } process_cpu_time_us: 928 total_duration_us: 514409 total_cpu_time_us: 45232 2026-01-07T17:42:11.056862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:42:11.061627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13700, MsgBus: 21336 2026-01-07T17:42:02.397222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676354581773023:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:02.397317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:02.633078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:02.647915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:02.648023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:02.760108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:02.797247Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:02.799584Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676354581772913:2081] 1767807722394432 != 1767807722394435 2026-01-07T17:42:02.951037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.000786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:03.000812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:03.000818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:03.000948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.413068Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:03.546935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.583336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.752984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.321611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.512716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.690311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.108509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676371761643977:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:06.108674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:06.108994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676371761643987:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:06.109071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:06.788686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.874253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.906968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.987269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.045353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.130304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.205993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376056612158:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376056612163:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376056612165:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.397461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676354581773023:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:07.397610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:07.401615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.423420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376056612167:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:07.542915Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376056612221:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.332121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 3048, MsgBus: 12696 2026-01-07T17:42:00.686970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346200783543:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.687023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.955065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.217139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.508265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.508359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.511758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.721468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.725116Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346200783512:2081] 1767807720685325 != 1767807720685328 2026-01-07T17:42:01.725207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.729458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.729551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.347248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-07T17:42:02.347281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.347295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:02.820727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.544430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.579066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.748407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.332325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.528825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.711928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.078040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367675621876:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.078234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.091447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367675621886:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.091512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.687271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346200783543:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.687332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.876533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.907750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.953898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.008950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.045654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.093855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351598Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376265557362:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376265557367:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376265557368:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.420278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376265557371:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540167Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376265557422:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14419, MsgBus: 15496 2026-01-07T17:42:00.683082Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676347048277714:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.971146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.234818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.439353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.460100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.481832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.737020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676347048277683:2081] 1767807720674690 != 1767807720674693 2026-01-07T17:42:01.747323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.751892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.751965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.765225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:02.346080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.346104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.346110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.346248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-07T17:42:02.750249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.546766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.582261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.753019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.349085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.608703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.747277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.066834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368523116046:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368523116056:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.685071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676347048277714:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.685133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.882233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.913252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.948908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.000062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.043152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.104526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351398Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377113051532:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377113051538:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377113051537:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.420235Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676377113051541:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676377113051592:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.332241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18998, MsgBus: 21441 2026-01-07T17:42:00.683185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676344193811878:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.739277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.311575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.461979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.484282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.542245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676344193811845:2081] 1767807720673170 != 1767807720673173 2026-01-07T17:42:01.718636Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.760966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.787895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.788004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.799185Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 
2026-01-07T17:42:01.799226Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:42:02.266467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.332124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.332145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.332156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.332237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.542914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.580128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.748472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.323644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.505952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.716022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.066296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365668650205:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.091973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365668650215:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.092035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.683674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676344193811878:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.683746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.850929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.891431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.928739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.959724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.010588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.050014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.123994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351657Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374258585692:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374258585698:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374258585697:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.416695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676374258585701:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540799Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676374258585752:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.332351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TFlatTest::LargeProxyReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14138, MsgBus: 1536 2026-01-07T17:42:00.683157Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346737907804:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.763897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.276892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.425099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.461165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.482338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.765287Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346737907771:2081] 1767807720677649 != 1767807720677652 2026-01-07T17:42:01.765373Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.782194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.784372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.784473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path 
status: LookupError; 2026-01-07T17:42:02.117445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.349315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.349354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.349378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.349466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.544626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.598624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.749186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.301278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.534085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.811608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.433651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368212746141:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.433782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.434146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368212746151:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.434216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.684672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346737907804:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.684749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.792760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.880967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.919093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.960039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.998652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.045371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.097710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.352386Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376802681618:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376802681624:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376802681623:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.408445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.423746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376802681627:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.541123Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376802681680:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.418396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 534 cpu_time_us: 534 } query_phases { duration_us: 2455 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 215 affected_shards: 1 } compilation { duration_us: 68541 cpu_time_us: 63318 } process_cpu_time_us: 902 total_duration_us: 72853 total_cpu_time_us: 64969 query_phases { duration_us: 515 cpu_time_us: 515 } query_phases { duration_us: 3249 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 267 affected_shards: 1 } compilation { duration_us: 65113 cpu_time_us: 59982 } process_cpu_time_us: 928 total_duration_us: 70255 total_cpu_time_us: 61692 2026-01-07T17:42:11.151039Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676393982551318:2571], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrth1x40keqpdq3gqgd7rj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmRmZDRlMDAtNWViODQ5YjMtZjg3ODNmMTItM2MxYTk3OWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:11.159439Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676393982551319:2572], TxId: 281474976710678, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrth1x40keqpdq3gqgd7rj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmRmZDRlMDAtNWViODQ5YjMtZjg3ODNmMTItM2MxYTk3OWE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592676393982551315:2531], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:11.159964Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmRmZDRlMDAtNWViODQ5YjMtZjg3ODNmMTItM2MxYTk3OWE=, ActorId: [1:7592676389687583862:2531], ActorState: ExecuteState, LegacyTraceId: 01kecrth1x40keqpdq3gqgd7rj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 884 cpu_time_us: 884 } query_phases { duration_us: 42259 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1843 } query_phases { duration_us: 82452 cpu_time_us: 1016 } compilation { duration_us: 281386 cpu_time_us: 258335 } process_cpu_time_us: 2147 total_duration_us: 410266 total_cpu_time_us: 264225 query_phases { duration_us: 932 cpu_time_us: 932 } query_phases { duration_us: 3094 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 520 } query_phases { duration_us: 1606 cpu_time_us: 259 } query_phases { duration_us: 4532 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 429 affected_shards: 1 } compilation { duration_us: 174316 cpu_time_us: 168261 } process_cpu_time_us: 2349 total_duration_us: 188424 total_cpu_time_us: 172750 query_phases { duration_us: 634 cpu_time_us: 634 } query_phases { duration_us: 3598 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 1165 } query_phases { duration_us: 1019 cpu_time_us: 180 affected_shards: 1 } compilation { duration_us: 241843 cpu_time_us: 236539 } process_cpu_time_us: 1411 total_duration_us: 249748 total_cpu_time_us: 239929 query_phases { duration_us: 622 cpu_time_us: 622 } query_phases { duration_us: 2949 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 680 } query_phases { duration_us: 3953 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 404 affected_shards: 1 } compilation { duration_us: 150959 cpu_time_us: 145281 } process_cpu_time_us: 1486 total_duration_us: 161905 total_cpu_time_us: 148473 query_phases { duration_us: 616 cpu_time_us: 616 } query_phases { duration_us: 3536 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 298 affected_shards: 1 } compilation { duration_us: 63533 cpu_time_us: 57789 } process_cpu_time_us: 1084 total_duration_us: 69380 total_cpu_time_us: 59787 query_phases { duration_us: 480 cpu_time_us: 480 } query_phases { duration_us: 54957 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 328 affected_shards: 1 } compilation { duration_us: 69091 cpu_time_us: 63707 } process_cpu_time_us: 832 total_duration_us: 125831 total_cpu_time_us: 65347 |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 32294, MsgBus: 10896 2026-01-07T17:42:00.682849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676343837282872:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.682911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.744897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.297037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.487283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.487407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.650562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.699583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.718612Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.764004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.781758Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676343837282839:2081] 1767807720671110 != 1767807720671113 2026-01-07T17:42:01.784751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.219184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.346593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.346630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.346638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.346728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.541168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.580981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.753040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.361867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.571497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.763490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.161575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365312121213:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.161721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.164193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365312121223:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.164303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.682861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676343837282872:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.682925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.797673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.843095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.881645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.919190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.952669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.000655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.052029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.101700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351698Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373902056698:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373902056703:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676373902056705:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.421102Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676373902056707:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.539974Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676373902056758:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:11.302993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 25220, MsgBus: 13702 2026-01-07T17:42:00.677873Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676345726885397:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.677936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.049075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.293006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.482014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.482092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.486114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.728302Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.730798Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676345726885366:2081] 1767807720668028 != 1767807720668031 2026-01-07T17:42:01.731372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.733165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.885334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-07T17:42:02.348213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.348236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.348242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.348336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.545604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.584808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.754399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.380401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.552530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.708821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.068415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367201723722:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.073069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.094298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367201723732:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.094382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.678020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676345726885397:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.678099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.878380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.917765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.955030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.005602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.050209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.104202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351457Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375791659203:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375791659208:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375791659209:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.424160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676375791659212:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540116Z node 1 :TX_PROXY ERROR: schemereq. ... de 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:42:10.900016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:42:10.900136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:42:10.900169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-07T17:42:10.900256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-07T17:42:10.900283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-07T17:42:10.900803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7592676388676561571:2549];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710673;this=136500997754304;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730900;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.901387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7592676388676561562:2544];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=136500997757888;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730901;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.901991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7592676388676561557:2543];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=136500997754976;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730901;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.903157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7592676388676561566:2547];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976710673;this=136500997976512;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730902;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.905672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;self_id=[1:7592676388676561565:2546];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976710673;this=136500997759904;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730905;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.907560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7592676388676561570:2548];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976710673;this=136500997760576;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730907;max=18446744073709551615;plan=0;src=[1:7592676345726885729:2151];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.991228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.991286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.991300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.999411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.999468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.999489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.001329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.001385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.001400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.009146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.009215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.009230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017185Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.017307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TFlatTest::LargeProxyReplyRW >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> KqpCost::CTASWithRetry+isOlap [GOOD] |89.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 8149, MsgBus: 10337 2026-01-07T17:42:00.683537Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676345661140116:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.049037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.363227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.464188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.482728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.603854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676345661140084:2081] 1767807720664920 != 
1767807720664923 2026-01-07T17:42:01.625416Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.713220Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.746916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.937059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.348340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.348361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.348366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.348540Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.557123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.591576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.751943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.295361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.525947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.772842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.077851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367135978452:2400], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.077976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.092145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367135978462:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.092214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.684068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676345661140116:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.684151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.793807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.874659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.951819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.993605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.081008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.146047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.209593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351672Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375725913947:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375725913952:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375725913954:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.418732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676375725913956:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540763Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676375725914007:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.1%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 14640, MsgBus: 8030 2026-01-07T17:42:00.673505Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676345119337723:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.673573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.062156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.450535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.466493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.481993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.485041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.819303Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.826737Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676345119337695:2081] 1767807720672768 != 1767807720672771 2026-01-07T17:42:01.842320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.905489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.345662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.345682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.345687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.345800Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.547814Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.589896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.749123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.300078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.565026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.760707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.104060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676366594176067:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.104183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.104550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676366594176077:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.104612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.673870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676345119337723:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.674063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.788581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.870321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.907565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.949034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.988147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.042192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.090841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351415Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375184111551:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375184111557:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375184111556:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.407229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.426959Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676375184111560:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540786Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676375184111611:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, Local ... 592676392363981128:2542], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-07T17:42:11.018484Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7592676392363981128:2542], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-07T17:42:11.018704Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. Recv TEvReadResult from ShardID=72075186224037914, ReadId=0, Status=SUCCESS, Finished=1, RowCount=3, TxLocks= , BrokenTxLocks= 2026-01-07T17:42:11.018725Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. Taken 0 locks 2026-01-07T17:42:11.018741Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. new data for read #0 seqno = 1 finished = 1 2026-01-07T17:42:11.018767Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592676392363981127:2541], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-07T17:42:11.018781Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592676392363981127:2541], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:42:11.018799Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-07T17:42:11.018813Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. 
enter pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-07T17:42:11.018864Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. exit pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 3 freeSpace: 8388548 2026-01-07T17:42:11.018898Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. returned 3 rows; processed 3 rows 2026-01-07T17:42:11.018963Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. dropping batch for read #0 2026-01-07T17:42:11.018972Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. effective maxinflight 1 sorted 1 2026-01-07T17:42:11.018983Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-07T17:42:11.018999Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: SelfId: [1:7592676392363981129:2541], TxId: 281474976710674, task: 1, CA Id [1:7592676392363981127:2541]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-07T17:42:11.019139Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592676392363981127:2541], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:42:11.019350Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:42:11.019415Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7592676392363981127:2541] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 10333 Tasks { TaskId: 1 CpuTimeUs: 1026 FinishTimeMs: 1767807731019 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 142 BuildCpuTimeUs: 884 WaitOutputTimeUs: 1625 HostName: "ghrun-z2c6jslqle" NodeId: 1 StartTimeMs: 1767807731019 CreateTimeMs: 1767807731008 UpdateTimeMs: 1767807731019 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:42:11.019481Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7592676392363981127:2541] 2026-01-07T17:42:11.019498Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:42:11.019517Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7592676392363981128:2542], trace_id# 2026-01-07T17:42:11.019620Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592676392363981128:2542], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-07T17:42:11.019674Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807731 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-07T17:42:11.019737Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592676392363981128:2542], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj540t9zw1cqamg7ry0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:42:11.019842Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:42:11.019964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:42:11.020020Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2026-01-07T17:42:13.019417Z, after 1.999463s 2026-01-07T17:42:11.020158Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7592676388069013795:2532] SeqNo# 1 Rows# 1 trace_id# 2026-01-07T17:42:11.020306Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7592676392363981128:2542] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 8848 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 821 FinishTimeMs: 1767807731019 InputRows: 1 InputBytes: 20 ComputeCpuTimeUs: 129 BuildCpuTimeUs: 692 HostName: "ghrun-z2c6jslqle" NodeId: 1 CreateTimeMs: 1767807731008 UpdateTimeMs: 1767807731019 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:42:11.020344Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7592676392363981128:2542] 2026-01-07T17:42:11.020416Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:42:11.030246Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:7592676388069013826:2532] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj540t9zw1cqamg7ry0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4Y2RiODktNjljMjk5NjAtMjcxZmNkZjUtZGI3MGIyMQ==, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.019181s ReadRows: 1 ReadBytes: 20 RequestUnits# 12 ForceFlag# true trace_id# 2026-01-07T17:42:11.032225Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807731032, txId: 281474976710673] shutting down >> TFlatTest::LargeProxyReplyRW [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TraverseDatashard::TraverseOneTableServerless >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TDatabaseResolverTests::Greenplum_MasterNode >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::Ydb_Serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 3021, MsgBus: 61959 2026-01-07T17:42:00.677265Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676345779934950:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.677325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.355901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.461193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.480860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.748720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.749165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676345779934922:2081] 1767807720676558 != 1767807720676561 2026-01-07T17:42:01.749276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.757533Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.801249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.302546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.331500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.331543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.331555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.331737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.543646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.585536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.753044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.332178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.686845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.752184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.067193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367254773283:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676367254773293:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.677680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676345779934950:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.677768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.795076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.875674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.919545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.956949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.996579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.042165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.096541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351772Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375844708766:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375844708771:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375844708772:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.404655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.418861Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676375844708775:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.544086Z node 1 :TX_PROXY ERROR: schemereq.c ... 026-01-07T17:42:10.883700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7592676388729611126:2548];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976710673;this=136979003261824;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730883;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.886522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7592676388729611125:2547];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710673;this=136979003437216;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730886;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.886606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7592676388729611124:2546];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=136979003453120;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730886;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.887143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7592676388729611119:2542];ev=NActors::IEventHandle;tablet_id=72075186224037932;tx_id=281474976710673;this=136979003456032;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730886;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.887509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7592676388729611123:2545];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710673;this=136979003269440;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730887;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.887734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7592676388729611120:2543];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=136979003454240;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730887;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.888481Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7592676388729611118:2541];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:458;TablesManager not ready=72075186224037933; 2026-01-07T17:42:10.889595Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: 
tablet_id=72075186224037929;self_id=[1:7592676388729611120:2543];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:458;TablesManager not ready=72075186224037929; 2026-01-07T17:42:10.890812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7592676388729611252:2550];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976710673;this=136979003456928;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730890;max=18446744073709551615;plan=0;src=[1:7592676345779935278:2145];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.918959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.918971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990605Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.997794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.997861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.997879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.999029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.999044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.004980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.005039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.005049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.008061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.008135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.008156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.011455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 
2026-01-07T17:42:11.011508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.011522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.015269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.015334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.015357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 317730 table_access { name: "/Root/TestTable" reads { rows: 3 bytes: 108 } } cpu_time_us: 88699 } compilation { duration_us: 479444 cpu_time_us: 472687 } process_cpu_time_us: 441 total_duration_us: 800407 total_cpu_time_us: 561827 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 64428, MsgBus: 12725 2026-01-07T17:42:03.468755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676359901093548:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.473931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.885188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:03.938106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.938224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:04.043256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:04.098159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:04.197067Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.341475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:04.341499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:04.341516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:04.341589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:04.481010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:05.203174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:05.246540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.371546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.533117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.599984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.481835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377080964503:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.481947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.482248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377080964513:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.482345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.755674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.789493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.824462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.856600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.884208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.910249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.939486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.972668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.036986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381375932678:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.037054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.037068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381375932683:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.037335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381375932685:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.037397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.041072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:08.051904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676381375932686:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:08.148232Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676381375932738:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.462345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676359901093548:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.462426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:10.498275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:42:10.703502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7592676389965867761:2551];tablet_id=72075186224037927;process=TTxInitSchema:: ... .h:56: TColumnShard.StateWork at 72075186224037997 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659596Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659596Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037995 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659621Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037991 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659632Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659646Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037993 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659663Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659669Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659692Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037937 
unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659697Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659717Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659732Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659744Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659766Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659767Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659794Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659801Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037939 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659818Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659835Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659840Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659864Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659867Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659889Z node 1 :TX_COLUMNSHARD WARN: 
ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659901Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659915Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659947Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659954Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.659987Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660004Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660011Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660036Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660040Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660060Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660091Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660096Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660123Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 
2026-01-07T17:42:14.660135Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660170Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660172Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660200Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660213Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-07T17:42:14.660235Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 query_phases { duration_us: 180413 table_access { name: "/Root/.tmp/sessions/45e49066-4a54-f867-ba2c-7aa6eac5a551/Root/TestTable2_4f7cda61-45d1-9943-2595-e89b2e72d6b1" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 14106 } compilation { duration_us: 9232 cpu_time_us: 5589 } process_cpu_time_us: 947 total_duration_us: 2888649 total_cpu_time_us: 20642 2026-01-07T17:42:14.667822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:42:14.672725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:243:2156] sender: [1:244:2060] recipient: [1:226:2145] 2026-01-07T17:40:18.691804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:40:18.691883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:18.691925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:40:18.691956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:40:18.691988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:40:18.692017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:40:18.692069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:40:18.692135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:40:18.692842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:40:18.693217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:40:18.773414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:40:18.773484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:18.791929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:40:18.792529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:40:18.792729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:40:18.799921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:40:18.800134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:40:18.800917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:40:18.801254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:40:18.805511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:18.805702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:40:18.807068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-07T17:40:18.807150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:40:18.807357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:40:18.807413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:40:18.807456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:40:18.807586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:40:18.939858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.943971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.944029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.944089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.944143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.944181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:40:18.944286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:40:18.944341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:328:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:10.625337Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:10.625429Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:10.625462Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:10.979777Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:10.979867Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:10.979957Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:10.979991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.344510Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.344587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.344658Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.344690Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.605932Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.606030Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.606118Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.606160Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.953365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.953457Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:11.953552Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:11.953588Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:12.306047Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:12.306142Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:12.306256Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:12.306297Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:12.677056Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:12.677150Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:12.677239Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:12.677273Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:13.061822Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:13.061911Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:42:13.061987Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:244:2156], Recipient [7:244:2156]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:13.062016Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:42:13.147166Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1497:3246], Recipient [7:244:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:42:13.147258Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:42:13.147400Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:42:13.147636Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 222us result status StatusPathDoesNotExist 2026-01-07T17:42:13.147833Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:42:13.148324Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1498:3247], Recipient [7:244:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:42:13.148380Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:42:13.148472Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:42:13.148626Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 155us result status StatusPathDoesNotExist 2026-01-07T17:42:13.148768Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 
18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:42:13.149292Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1499:3248], Recipient [7:244:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2026-01-07T17:42:13.149348Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:42:13.149438Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:42:13.149595Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 165us result status StatusPathDoesNotExist 2026-01-07T17:42:13.149738Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2026-01-07T17:42:13.312555Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676398923418878:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:13.312625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:13.534786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-07T17:42:13.541092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:13.541258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:13.546129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:13.662796Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676398923418841:2081] 1767807733310987 != 1767807733310990 2026-01-07T17:42:13.664947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:13.809060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:13.835743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:13.910722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:14.319512Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:17.495056Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7592676416103291817:4330] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2026-01-07T17:42:17.495120Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592676416103291817:4330] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2026-01-07T17:42:18.029841Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676422454583917:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:18.029901Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:18.041554Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:18.102739Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:18.109294Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676422454583888:2081] 1767807738028981 != 1767807738028984 2026-01-07T17:42:18.146407Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:18.146491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:18.148302Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:18.243541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:18.279190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:18.296045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:19.034765Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:21.753068Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7592676435339489566:4330] txid# 281474976711011 MergeResult Result too large TDataReq marker# P18 2026-01-07T17:42:21.753144Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7592676435339489566:4330] txid# 281474976711011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15262, MsgBus: 28780 2026-01-07T17:42:03.328168Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676357919851068:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.328766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.672709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.672771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:03.687470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:03.713522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:03.739735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:03.740657Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676357919850968:2081] 1767807723305235 != 1767807723305238 2026-01-07T17:42:03.847283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:03.847302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:03.847306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:03.847367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.993303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.302228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:04.466135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:04.474141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:04.746689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.950520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.204124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.272704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.218980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375099722026:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.219110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.219439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375099722036:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.219509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.514179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.542495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.565805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.589795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.614142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.640623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.666767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.704525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.772988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375099722906:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.773154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.773366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375099722911:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.773391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.773447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676375099722912:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.777130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.788972Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676375099722915:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.856500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676375099722966:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.316755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676357919851068:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.316818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:10.498398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... et_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:42:10.884531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:42:10.884552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-07T17:42:10.884613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-07T17:42:10.884634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-07T17:42:10.886619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7592676387984625267:2541];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=137020151270432;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730886;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.886737Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7592676387984625268:2542];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:458;TablesManager not ready=72075186224037934; 2026-01-07T17:42:10.888898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7592676387984625259:2536];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710673;this=137020151271776;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730888;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.889757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037928;self_id=[1:7592676387984625263:2537];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710673;this=137020151272672;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730889;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.890532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7592676387984625344:2544];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976710673;this=137020150250336;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730890;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.891349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7592676387984625264:2538];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976710673;this=137020151466208;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730891;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.892074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7592676387984625269:2543];ev=NActors::IEventHandle;tablet_id=72075186224037932;tx_id=281474976710673;this=137020151274464;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1767807730891;max=18446744073709551615;plan=0;src=[1:7592676357919851329:2153];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.918995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.919246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.990550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:10.998383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.004844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.004894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.004910Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.006024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.006076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.006090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.013612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.013678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.013694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 11778, MsgBus: 6471 ... waiting for SysViewsRoster update finished 2026-01-07T17:42:06.015217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:06.102636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:06.119374Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:06.119770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:06.119853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:06.365299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:06.365430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:06.443667Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807723094608 != 1767807723094612 2026-01-07T17:42:06.455360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:06.501506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:06.631889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:06.967021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:06.967078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:06.967113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:06.967692Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:06.990057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:07.319243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:07.384725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.667271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.049918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.329360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:09.173646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1904:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.173857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.174445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1977:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.174508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.203272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:09.392096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:09.648804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:09.927974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.174392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.418028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.698078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:10.974253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:11.318023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:11.318148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:11.318587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:11.318668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:11.318728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:11.324228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:11.488732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:11.535905Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:12.886507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:13.739723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 1767807723627217 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_2861ff7c-459e-032f-24e6-a28dfd6c4e70" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 1374 affected_shards: 1 } compilation { duration_us: 9329 cpu_time_us: 5237 } process_cpu_time_us: 1110 total_duration_us: 1023752 total_cpu_time_us: 7721 2026-01-07T17:42:14.300543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 12125, MsgBus: 1121 2026-01-07T17:42:03.448481Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676359778457680:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:03.459484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:03.896711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:03.898805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:03.898920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:03.924307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:04.038172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:04.220997Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:04.233547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:04.233568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:04.233578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:04.233657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:04.477998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:05.109020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:05.113595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:05.161801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.288408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.443884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.512740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.303257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376958328700:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.303388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.304374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376958328710:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.304488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.575539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.605205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.636023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.659046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.683944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.710183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.733748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.772294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.844183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376958329580:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.844302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.844514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376958329584:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.844555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.844583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676376958329587:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.847729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.859829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676376958329589:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.955428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676376958329640:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:08.448966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676359778457680:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:08.449061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:10.531290Z node 1 :KQP_GATEWAY DEBUG: kqp_metadata_loader.cpp:922: Load table metadata from cache by path, request Path: /Root/Test 2026-01-07T17:42:10.959680Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snapshot_manager.cpp:37: Start KqpSnapshotManager at [1:7592676389843231839:2523] 2026-01-07T17:42:10.959708Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snaps ... 8199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:209;event=send_data_to_compute;space=8388608;queue=0;compute_actor_id=[1:7592676394138199150:2533];rows=0; 2026-01-07T17:42:11.067164Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:364;event=register_scanner_actor;actor_id=[1:7592676394138199155:2073];state=Starting;tablet_id=72075186224037914;generation=1;shard_actor=[0:0:0]; 2026-01-07T17:42:11.067205Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:125;event=start_scanner;actor_id=[1:7592676394138199155:2073]; 2026-01-07T17:42:11.067231Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:48;event=scan_ack;actor_id=[1:7592676394138199155:2073]; 2026-01-07T17:42:11.078292Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_fetcher_actor.cpp:134;Recv TEvScanData from ShardID==[1:7592676394138199155:2073];ScanId=1;Finished=1;Lock=;BrokenLocks=; 2026-01-07T17:42:11.078425Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_fetcher_actor.cpp:531;action=got EvScanData;rows=3;finished=1;generation=1;exceeded=0;scan=1;packs_to_send=0;from=[1:7592676394138199155:2073];shards remain=0;in flight scans=1;cursor=;in flight shards=1;delayed_for_seconds_by_ratelimiter=0;tablet_id=72075186224037914;locks=0;broken locks=0; 2026-01-07T17:42:11.078470Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.cpp:40;event=receive_data;actor_id=[1:7592676394138199155:2073];count_chunks=1; 2026-01-07T17:42:11.078502Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:292;event=on_receive;compute_shard_id=NO_VALUE_OPTIONAL; 2026-01-07T17:42:11.078528Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: 
self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:238;event=add_data_to_compute;rows=3; 2026-01-07T17:42:11.078568Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:209;event=send_data_to_compute;space=8388608;queue=1;compute_actor_id=[1:7592676394138199150:2533];rows=3; 2026-01-07T17:42:11.078602Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:193;event=stop_scanner; 2026-01-07T17:42:11.078640Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:45;event=scan_ack_on_finished;actor_id=[1:7592676394138199155:2073]; 2026-01-07T17:42:11.078669Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:551: SelfId: [1:7592676394138199152:2535]. EVLOGKQP:0/0/3/3 2026-01-07T17:42:11.078674Z node 1 :KQP_COMPUTE DEBUG: log.h:466: kqp_scan_compute_actor.cpp:184 :TEvSendData: [1:7592676394138199152:2535]/[1:7592676394138199150:2533] 2026-01-07T17:42:11.078699Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:392;event=scanner_finished;tablet_id=72075186224037914;stop_shard=1; 2026-01-07T17:42:11.078736Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:100;event=stop_scanner;actor_id=[1:7592676394138199155:2073];message=;final_flag=1; 2026-01-07T17:42:11.078823Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592676394138199150:2533], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtgj564cqzqmnb1vkee7g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:42:11.078845Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:594: SelfId: [1:7592676394138199152:2535]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2026-01-07T17:42:11.078918Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7592676394138199152:2535];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:441;event=wait_all_scanner_finished;scans=0; 2026-01-07T17:42:11.078982Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:685: SelfId: [1:7592676394138199152:2535]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2026-01-07T17:42:11.079205Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:42:11.079353Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7592676394138199150:2533] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 20500 Tasks { TaskId: 1 CpuTimeUs: 983 FinishTimeMs: 1767807731078 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 122 BuildCpuTimeUs: 861 WaitOutputTimeUs: 366 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 1 StartTimeMs: 1767807731078 CreateTimeMs: 1767807731016 UpdateTimeMs: 1767807731079 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:42:11.079407Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7592676394138199150:2533] 2026-01-07T17:42:11.079440Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:42:11.079443Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7592676394138199151:2534], trace_id# 2026-01-07T17:42:11.079573Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7592676394138199151:2534], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj564cqzqmnb1vkee7g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646922 2026-01-07T17:42:11.079627Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807731 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-07T17:42:11.079684Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7592676394138199151:2534], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtgj564cqzqmnb1vkee7g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Compute state finished. All channels and sinks finished 2026-01-07T17:42:11.079804Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:42:11.080293Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:42:11.080525Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7592676389843231797:2523] SeqNo# 1 Rows# 1 trace_id# 2026-01-07T17:42:11.080704Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7592676394138199151:2534] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 6453 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 746 FinishTimeMs: 1767807731079 InputRows: 1 InputBytes: 20 ComputeCpuTimeUs: 133 BuildCpuTimeUs: 613 HostName: "ghrun-z2c6jslqle" NodeId: 1 CreateTimeMs: 1767807731034 UpdateTimeMs: 1767807731079 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:42:11.080747Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7592676394138199151:2534] 2026-01-07T17:42:11.080845Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:42:11.080897Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:7592676389843231849:2523] TxId: 281474976710674. Ctx: { TraceId: 01kecrtgj564cqzqmnb1vkee7g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlOWRiMmYtNzgwOTk4NWMtNTlhZmQ5YTktOTBiZTkwODU=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.026953s ReadRows: 3 ReadBytes: 96 RequestUnits# 17 ForceFlag# true trace_id# 2026-01-07T17:42:11.081677Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767807731032, txId: 281474976710673] shutting down 2026-01-07T17:42:11.081764Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2026-01-07T17:42:13.079475Z, after 1.997950s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 62551, MsgBus: 65132 ... 
waiting for SysViewsRoster update finished 2026-01-07T17:42:03.926335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:04.064109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:04.085652Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:04.086243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:04.086308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:04.358296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:04.358391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:04.437012Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807720656367 != 1767807720656371 2026-01-07T17:42:04.449510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:04.494585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:04.644610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:05.014407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:05.014479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:05.014511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:05.015019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:05.029401Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:05.365343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:05.440859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.733137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.072524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.342841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.163803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1905:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.163968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.164867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1978:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.165013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.200401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.397311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.643887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.921181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.161152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.420690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.697515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.968279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:09.310113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.310386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.310819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.310914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.311001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:09.316774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:09.482421Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:09.540277Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2859:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.847470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table. ... ent type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390564Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390602Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037949 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390639Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037947 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390698Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390743Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037945 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390793Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037943 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390837Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390875Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037995 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390911Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037997 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390946Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.390979Z node 1 
:TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037999 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391014Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391050Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391158Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037937 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391226Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037938 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391262Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037940 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391300Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037939 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391338Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037941 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391381Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391415Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391450Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391498Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391543Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037972 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391578Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037993 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult 
TxId: 281474976715680 2026-01-07T17:42:17.391614Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391649Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037991 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391698Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391742Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391782Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391829Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391875Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391910Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391943Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.391980Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392015Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392050Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392085Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392133Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392177Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392228Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392292Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392339Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392374Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-07T17:42:17.392409Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 query_phases { duration_us: 1767807726078378 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_f8164771-4843-8939-7b9e-2e84f29974be" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 11064 } compilation { duration_us: 8118 cpu_time_us: 4123 } process_cpu_time_us: 1115 total_duration_us: 5143693 total_cpu_time_us: 16302 |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2026-01-07T17:42:24.908040Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2026-01-07T17:42:24.727198Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
|89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23136, MsgBus: 14608 2026-01-07T17:42:00.683161Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676344281411582:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:00.756324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:01.385036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.421138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.462397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.481081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.886515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.886608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.893080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676344281411549:2081] 1767807720669993 != 1767807720669996 2026-01-07T17:42:01.917443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.920869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.347142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.347162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.347171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347307Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:02.394258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.543183Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.581910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.748503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.315460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.557904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.792631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.108756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365756249916:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.108874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.109160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365756249926:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.109211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.683622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676344281411582:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.683702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.791600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.879239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.933070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.989972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.090086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.142202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.223235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351846Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374346185407:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374346185412:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374346185413:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.417293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] Acto ... ssionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:12.509353Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676395821022637:2635], TxId: 281474976710691, task: 4. Ctx: { CheckpointId : . TraceId : 01kecrtjbz1vyj1qgrvsxebdns. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592676395821022631:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:12.509695Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtjbz1vyj1qgrvsxebdns, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1177 cpu_time_us: 1177 } query_phases { duration_us: 4137 table_access { name: "/Root/TestTable" partitions_count: 1 } table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1275 } query_phases { duration_us: 2248 cpu_time_us: 900 } compilation { duration_us: 402294 cpu_time_us: 394309 } process_cpu_time_us: 2303 total_duration_us: 414258 total_cpu_time_us: 399964 2026-01-07T17:42:12.813685Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676395821022675:2649], TxId: 281474976710694, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtjs56xde3x813xbbm3es. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:12.813915Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676395821022676:2650], TxId: 281474976710694, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtjs56xde3x813xbbm3es. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7592676395821022672:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:12.814241Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtjs56xde3x813xbbm3es, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1117 cpu_time_us: 1117 } query_phases { duration_us: 3822 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 521 } query_phases { duration_us: 1740 cpu_time_us: 172 } compilation { duration_us: 285816 cpu_time_us: 277829 } process_cpu_time_us: 2153 total_duration_us: 296371 total_cpu_time_us: 281792 2026-01-07T17:42:13.085241Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676400115990005:2661], TxId: 281474976710697, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtk2ma59bdwmbb7ygf027. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:13.085423Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676400115990006:2662], TxId: 281474976710697, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtk2ma59bdwmbb7ygf027. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592676400115990002:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:13.085739Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtk2ma59bdwmbb7ygf027, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1091 cpu_time_us: 1091 } query_phases { duration_us: 3526 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 655 } query_phases { duration_us: 1680 cpu_time_us: 224 } compilation { duration_us: 246448 cpu_time_us: 239444 } process_cpu_time_us: 2062 total_duration_us: 264514 total_cpu_time_us: 243476 2026-01-07T17:42:14.711742Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676404410957355:2683], TxId: 281474976710700, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtkbh8jsm0q32ppny5vbh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:14.711913Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676404410957356:2684], TxId: 281474976710700, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtkbh8jsm0q32ppny5vbh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592676404410957352:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:14.712141Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtkbh8jsm0q32ppny5vbh, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 772 cpu_time_us: 772 } query_phases { duration_us: 1321515 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1320094 } query_phases { duration_us: 1697 cpu_time_us: 1069 } compilation { duration_us: 274967 cpu_time_us: 266654 } process_cpu_time_us: 1775 total_duration_us: 1606585 total_cpu_time_us: 1590364 2026-01-07T17:42:14.915192Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676404410957390:2696], TxId: 281474976710703, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtmxza9fzfzntg2pxfrts. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:42:14.946034Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676404410957391:2697], TxId: 281474976710703, task: 2. Ctx: { TraceId : 01kecrtmxza9fzfzntg2pxfrts. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592676404410957387:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:14.946373Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtmxza9fzfzntg2pxfrts, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 729 cpu_time_us: 729 } query_phases { duration_us: 3180 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 509 } query_phases { duration_us: 31821 cpu_time_us: 196 } compilation { duration_us: 188213 cpu_time_us: 181240 } process_cpu_time_us: 1513 total_duration_us: 226917 total_cpu_time_us: 184187 2026-01-07T17:42:15.148198Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676408705924721:2709], TxId: 281474976710706, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtn5854jtx5th1eqxdbtq. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-07T17:42:15.148335Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592676408705924722:2710], TxId: 281474976710706, task: 2. Ctx: { CheckpointId : . TraceId : 01kecrtn5854jtx5th1eqxdbtq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7592676408705924718:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:42:15.148521Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmJiNDdhYjQtYmE1Y2YxMzktOWIzZmVhYTAtZmIzYjFmODU=, ActorId: [1:7592676387231087651:2532], ActorState: ExecuteState, LegacyTraceId: 01kecrtn5854jtx5th1eqxdbtq, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 661 cpu_time_us: 661 } query_phases { duration_us: 2814 table_access { name: "/Root/TestTable2" partitions_count: 1 } cpu_time_us: 398 } query_phases { duration_us: 1101 cpu_time_us: 164 } compilation { duration_us: 188173 cpu_time_us: 181839 } process_cpu_time_us: 1503 total_duration_us: 195479 total_cpu_time_us: 184565 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 22492, MsgBus: 21581 2026-01-07T17:42:00.682087Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676346991806277:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.682148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.035375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.252675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.505861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.507694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.513309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.789489Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.789558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.825039Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676346991806245:2081] 1767807720664103 != 1767807720664106 2026-01-07T17:42:01.851400Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:02.345382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.345406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.345412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-07T17:42:02.345538Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:02.804657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.544021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.594854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.758635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.331455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.549310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.717983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.066295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368466644609:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368466644619:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.090209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.682377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676346991806277:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.682473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.791779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.888277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.931771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.970939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.018145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.057051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.141623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351795Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377056580096:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377056580102:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.353016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377056580101:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.417803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676377056580105:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.544683Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676377056580156:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.418420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 3715 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 343 affected_shards: 1 } compilation { duration_us: 50469 cpu_time_us: 46048 } process_cpu_time_us: 906 total_duration_us: 56984 total_cpu_time_us: 47297 query_phases { duration_us: 4102 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 118 affected_shards: 1 } compilation { duration_us: 56623 cpu_time_us: 52242 } process_cpu_time_us: 610 total_duration_us: 62293 total_cpu_time_us: 52970 2026-01-07T17:42:10.755229Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=5; 2026-01-07T17:42:10.781937Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:42:10.782108Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:42:10.782252Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676389941482525:2542], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [1:7592676389941482372:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037927, Sink=[1:7592676389941482525:2542].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:42:10.782727Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676389941482519:2542], SessionActorId: [1:7592676389941482372:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7592676389941482372:2542]. 2026-01-07T17:42:10.782942Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NTUyZDA2NTItNWNkMWIwNTUtZDhhZmI1MzgtMzg3NmY1ZmY=, ActorId: [1:7592676389941482372:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrth03f48thkd2p12y45x0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7592676389941482520:2542] from: [1:7592676389941482519:2542] trace_id# 2026-01-07T17:42:10.783076Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676389941482520:2542] TxId: 281474976710677. Ctx: { TraceId: 01kecrth03f48thkd2p12y45x0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTUyZDA2NTItNWNkMWIwNTUtZDhhZmI1MzgtMzg3NmY1ZmY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:42:10.783329Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTUyZDA2NTItNWNkMWIwNTUtZDhhZmI1MzgtMzg3NmY1ZmY=, ActorId: [1:7592676389941482372:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrth03f48thkd2p12y45x0, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 30390 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 114 } compilation { duration_us: 59523 cpu_time_us: 55475 } process_cpu_time_us: 707 total_duration_us: 91554 total_cpu_time_us: 56296 query_phases { duration_us: 4070 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 363 affected_shards: 1 } compilation { duration_us: 65084 cpu_time_us: 52151 } process_cpu_time_us: 658 total_duration_us: 70797 total_cpu_time_us: 53172 query_phases { duration_us: 3905 cpu_time_us: 414 affected_shards: 1 } compilation { duration_us: 143834 cpu_time_us: 137551 } process_cpu_time_us: 577 total_duration_us: 149259 total_cpu_time_us: 138542 query_phases { duration_us: 3865 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 290 affected_shards: 1 } compilation { duration_us: 58731 cpu_time_us: 54382 } process_cpu_time_us: 632 total_duration_us: 66082 total_cpu_time_us: 55304 query_phases { duration_us: 4170 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 398 affected_shards: 1 } compilation { duration_us: 52555 cpu_time_us: 47856 } process_cpu_time_us: 521 total_duration_us: 58004 total_cpu_time_us: 48775 query_phases { duration_us: 3589 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 335 affected_shards: 1 } compilation { duration_us: 51733 cpu_time_us: 46958 } process_cpu_time_us: 587 total_duration_us: 56878 total_cpu_time_us: 47880 |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow [GOOD] Test command err: Trying to start YDB, gRPC: 13870, MsgBus: 21414 2026-01-07T17:42:00.683038Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676347039850570:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.683251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.393151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.425322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.466530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.485543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.728796Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676347039850537:2081] 1767807720671322 != 1767807720671325 2026-01-07T17:42:01.736152Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.740956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.741082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:01.763245Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:02.246185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:02.347575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.347613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.347632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.347833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:03.547872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.586202Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:03.753902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.338367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.711320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.868028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.208062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368514688899:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.208203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.208553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676368514688909:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.208626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.687637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676347039850570:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.687728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.794274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.842371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.878172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.916802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.963979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.018877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.059158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.116234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351487Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377104624380:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377104624385:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676377104624386:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.406282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.421608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676377104624389:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:07.540648Z node 1 :TX_PROXY ERROR: schemereq. ... .003758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.003826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.003840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.004975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.005030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.005052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.010770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-07T17:42:11.012774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 7056 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 368 affected_shards: 1 } query_phases { duration_us: 5601 affected_shards: 1 } compilation { duration_us: 59182 cpu_time_us: 54745 } process_cpu_time_us: 703 total_duration_us: 73563 total_cpu_time_us: 55816 query_phases { duration_us: 5358 
table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 118 affected_shards: 1 } query_phases { duration_us: 8123 affected_shards: 1 } compilation { duration_us: 107948 cpu_time_us: 103312 } process_cpu_time_us: 720 total_duration_us: 123326 total_cpu_time_us: 104150 query_phases { duration_us: 80749 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 410 affected_shards: 1 } query_phases { duration_us: 7403 affected_shards: 1 } compilation { duration_us: 54329 cpu_time_us: 49103 } process_cpu_time_us: 795 total_duration_us: 144536 total_cpu_time_us: 50308 query_phases { duration_us: 51216 table_access { name: "/Root/TestTable" updates { rows: 2 bytes: 744 } partitions_count: 2 } cpu_time_us: 463 affected_shards: 2 } query_phases { duration_us: 9074 affected_shards: 2 } compilation { duration_us: 55606 cpu_time_us: 50704 } process_cpu_time_us: 744 total_duration_us: 117914 total_cpu_time_us: 51911 query_phases { duration_us: 128554 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 396 affected_shards: 1 } query_phases { duration_us: 6467 affected_shards: 1 } compilation { duration_us: 103856 cpu_time_us: 98562 } process_cpu_time_us: 879 total_duration_us: 242043 total_cpu_time_us: 99837 query_phases { duration_us: 10277 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 333 affected_shards: 1 } query_phases { duration_us: 10194 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 366 affected_shards: 2 } query_phases { duration_us: 10079 affected_shards: 2 } compilation { duration_us: 101459 cpu_time_us: 95165 } process_cpu_time_us: 1106 total_duration_us: 135326 total_cpu_time_us: 96970 2026-01-07T17:42:12.886749Z node 1 :TX_COLUMNSHARD_RESTORE WARN: log.cpp:841: tablet_id=72075186224037935;tablet_actor_id=[1:7592676389989526768:2553];this=137264370557120;activity=1;task_id=32f420bc-ebf011f0-84976c0d-4bd816b7::4;fline=restore.cpp:28;event=merge_data_problems;write_id=4;tablet_id=72075186224037935;message=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}; 2026-01-07T17:42:12.886939Z node 1 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7592676389989526768:2553];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037935;event=TEvWriteBlobsResult;fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]};tx_id=281474976710689; 2026-01-07T17:42:12.887597Z node 1 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:152: Scan [1:7592676398579462201:2834] got AbortExecution txId: 281474976710689 scanId: 1 gen: 1 tablet: 72075186224037935 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]} } 2026-01-07T17:42:12.895283Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7592676398579462198:2832], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [0:0:0]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037935, Sink=[1:7592676398579462198:2832].{
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } 2026-01-07T17:42:12.895388Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [1:7592676398579462196:2832], TxId: 281474976710689, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtjz79vqsykphgssveg5t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODBiY2U2NTctODY4ZjUzYzItNDgxMDgyMjgtZDMxZDExNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Sink[0] fatal error: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } } 2026-01-07T17:42:12.907298Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592676398579462196:2832], TxId: 281474976710689, task: 1. Ctx: { CheckpointId : . TraceId : 01kecrtjz79vqsykphgssveg5t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODBiY2U2NTctODY4ZjUzYzItNDgxMDgyMjgtZDMxZDExNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } }. 2026-01-07T17:42:12.918649Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODBiY2U2NTctODY4ZjUzYzItNDgxMDgyMjgtZDMxZDExNjU=, ActorId: [1:7592676389989526656:2542], ActorState: ExecuteState, LegacyTraceId: 01kecrtjz79vqsykphgssveg5t, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key. {\"sorting_columns\":[{\"name\":\"Group\",\"value\":\"1\"},{\"name\":\"Name\",\"value\":\"Anna\"}],\"fields\":[\"Group: Uint32\",\"Name: String\"]}" issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 118222 cpu_time_us: 79 } compilation { duration_us: 86413 cpu_time_us: 82294 } process_cpu_time_us: 851 total_duration_us: 206535 total_cpu_time_us: 83224 query_phases { duration_us: 11400 cpu_time_us: 385 affected_shards: 1 } query_phases { duration_us: 5233 affected_shards: 1 } compilation { duration_us: 76065 cpu_time_us: 69421 } process_cpu_time_us: 795 total_duration_us: 96573 total_cpu_time_us: 70601 query_phases { duration_us: 8673 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 395 affected_shards: 1 } query_phases { duration_us: 5551 affected_shards: 1 } compilation { duration_us: 57824 cpu_time_us: 52840 } process_cpu_time_us: 801 total_duration_us: 74218 total_cpu_time_us: 54036 query_phases { duration_us: 6786 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 361 affected_shards: 1 } query_phases { duration_us: 5581 affected_shards: 1 } compilation { duration_us: 56088 cpu_time_us: 50863 } process_cpu_time_us: 880 total_duration_us: 70744 total_cpu_time_us: 52104 query_phases { duration_us: 6927 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 328 affected_shards: 1 } query_phases { duration_us: 16360 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 477 affected_shards: 2 } query_phases { duration_us: 8151 affected_shards: 2 } compilation { duration_us: 83430 cpu_time_us: 75838 } process_cpu_time_us: 1188 total_duration_us: 118453 total_cpu_time_us: 77831 query_phases { duration_us: 917 cpu_time_us: 917 } query_phases { duration_us: 181659 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 40 } deletes { rows: 2 } partitions_count: 2 } cpu_time_us: 69797 affected_shards: 10 } query_phases { duration_us: 28217 affected_shards: 10 } compilation { duration_us: 407511 cpu_time_us: 390395 } process_cpu_time_us: 2052 total_duration_us: 621831 total_cpu_time_us: 463161 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] Test command err: 2026-01-07T17:37:07.708418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675085929372125:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:07.710176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:07.789861Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:08.069051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:37:08.105618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:08.105724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:08.212167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:08.293190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:08.297130Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675085929372000:2081] 1767807427698599 != 1767807427698602 2026-01-07T17:37:08.322581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:08.641669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cc0/r3tmp/yandexHcBAaV.tmp 2026-01-07T17:37:08.641701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cc0/r3tmp/yandexHcBAaV.tmp 2026-01-07T17:37:08.641836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cc0/r3tmp/yandexHcBAaV.tmp 2026-01-07T17:37:08.641915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:08.717223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:08.762147Z INFO: TTestServer started on Port 4651 GrpcPort 23533 PQClient connected to localhost:23533 2026-01-07T17:37:09.082641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:09.095038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:37:09.199215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:11.872986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7592675103109242214:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:11.873335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:11.875335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675103109242240:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:11.875503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675103109242244:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:11.875740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:11.890807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:11.929857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675103109242249:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:37:12.259837Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675103109242300:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:12.303142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:12.400773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:12.580851Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675107404209605:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:12.590256Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmU0NjhmYTMtMzc5YjhhZDctYWNmYWM1MTAtOWU5ZGFkZWI=, ActorId: [1:7592675103109242212:2330], ActorState: ExecuteState, LegacyTraceId: 01kecrhcznbvfqgsftaf2p4wtf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:12.604668Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:37:12.641583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:12.713232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675085929372125:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:12.713319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675111699177203:2830] === CheckClustersList. Ok 2026-01-07T17:37:19.537451Z :Sinks_Oltp_WriteToTopicAndTable_1_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:19.570825Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:19.589813Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592675137468981214:2943] connected; active server actors: 1 2026-01-07T17:37:19.589997Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-07T17:37:19.597568Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T17:37:19.597735Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T17:37:19.609705Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:37:19.611340Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:37:19.611570Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T17:37:19.611615Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:37:19.611650Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 7207518 ... 676377956318189:3307] 2026-01-07T17:42:15.277822Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_14_1_39200730136352462_v1 2026-01-07T17:42:15.277846Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:489: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1 got EvDirectReadDestroyPartitionSession, assignId: 1 2026-01-07T17:42:15.278258Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-07T17:42:15.278283Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0] PartitionId [0] Generation [1] Write session will now close 2026-01-07T17:42:15.278304Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-07T17:42:15.278384Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-07T17:42:15.278399Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-07T17:42:15.284616Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7592676377956318189:3307]: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1 grpc read done: success# 0, data# { } 2026-01-07T17:42:15.284617Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|84281d72-f7a5411c-21c927d0-407a6e54_0 grpc read done: success: 0 data: 2026-01-07T17:42:15.284638Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|84281d72-f7a5411c-21c927d0-407a6e54_0 grpc read failed 2026-01-07T17:42:15.284663Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7592676377956318189:3307]: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1grpc read failed 2026-01-07T17:42:15.284687Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 7 
sessionId: test-message_group_id|84281d72-f7a5411c-21c927d0-407a6e54_0 2026-01-07T17:42:15.284708Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|84281d72-f7a5411c-21c927d0-407a6e54_0 is DEAD 2026-01-07T17:42:15.284712Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:363: Direct read proxy [14:7592676377956318189:3307]: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1 Close session with reason: reads done signal, closing everything 2026-01-07T17:42:15.284726Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:381: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1 closed 2026-01-07T17:42:15.284802Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7592676377956318189:3307]: session cookie 2 consumer test-consumer session test-consumer_14_1_39200730136352462_v1 proxy is DEAD 2026-01-07T17:42:15.284966Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0 grpc read done: success: 0 data: 2026-01-07T17:42:15.284979Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0 grpc read failed 2026-01-07T17:42:15.285005Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0 2026-01-07T17:42:15.285021Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|8d6b6a09-ccfc7162-117aa943-c3ed001d_0 is DEAD 2026-01-07T17:42:15.285170Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:42:15.285227Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:42:15.285321Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:42:15.285370Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:42:15.285384Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7592676377956317994:3281] destroyed 2026-01-07T17:42:15.285415Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7592676377956317997:3281] destroyed 2026-01-07T17:42:15.285457Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:42:15.285479Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592676377956317949:3273] destroyed 2026-01-07T17:42:15.285490Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.285506Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7592676377956317952:3273] destroyed 2026-01-07T17:42:15.285509Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.285525Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.285530Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:42:15.285546Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.285551Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.285560Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.285565Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.285577Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.285595Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.285606Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.321218Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.321270Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.321289Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.321317Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.321334Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.321652Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.321679Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.321691Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.321706Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.321718Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.323182Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.323212Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.323224Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.323240Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.323250Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.421546Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.421600Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.421619Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.421661Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.421678Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.421978Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.422005Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.422017Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.422034Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.422046Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:42:15.423566Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:42:15.423592Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.423602Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:42:15.423617Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:42:15.423625Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |89.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |89.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::PostgreSQL |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |89.4%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2026-01-07T17:42:28.291692Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2026-01-07T17:42:28.370801Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} >> AnalyzeDatashard::AnalyzeTwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2026-01-07T17:42:28.375198Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2026-01-07T17:42:28.446514Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. 
|89.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2026-01-07T17:42:24.250742Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:24.329707Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:24.329886Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:24.393129Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:24.433286Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:24.433405Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |89.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |89.4%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2026-01-07T17:42:28.455702Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
|89.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |89.5%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |89.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> KqpCost::VectorIndexLookup+useSink [GOOD] |89.5%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeDatashard::AnalyzeOneTable |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2026-01-07T17:42:26.443942Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:26.520004Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:26.520197Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:26.598219Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-07T17:42:26.631439Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 
2026-01-07T17:42:26.631542Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> TraverseDatashard::TraverseTwoTablesServerless |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TConsoleTests::TestCreateTenant |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TConsoleConfigTests::TestModifyConfigItem >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TConsoleTests::TestCreateSharedTenant >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 21563, MsgBus: 13529 2026-01-07T17:42:05.350894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676368331680587:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.350948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:05.570011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:05.581858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:05.582030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:05.636755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:05.648658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:05.717562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:05.717592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:05.717598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:42:05.717709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:05.729827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:06.086592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:06.133177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.254740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.357280Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:06.382721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.447701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.295599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381216584314:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.295734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.296119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381216584324:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.296175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.594649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.619600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.645643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.669289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.697478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.745932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.782902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.833043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:08.906977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381216585197:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.907063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.907117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381216585202:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.907302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676381216585204:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.907350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:08.910864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:08.920876Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676381216585205:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:09.016720Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676385511552553:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:10.354170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676368331680587:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:10.355577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:10.413359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:17.051694Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__create.cpp:24: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710774 DatabaseName: " ... 4775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48]
;[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] 2026-01-07T17:42:27.245536Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710774 2026-01-07T17:42:27.563975Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710775 /Root/Vectors: 
[[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: 
[["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] 2026-01-07T17:42:27.809539Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710776 /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] |89.5%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:18.049393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:18.127429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:18.133379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:18.133673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:18.133781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:18.424215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:18.509420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:18.509600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:18.545322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:18.630725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:19.268513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:19.269604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:19.269659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:19.269690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:19.270190Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:19.338354Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:19.855462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:22.236384Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:22.242493Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:22.245686Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:22.267547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:22.267635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:22.304959Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:22.306265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.465051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:22.465175Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:22.466515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.467009Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.467382Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.468106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.468493Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.468634Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.468829Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.469064Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.469162Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:22.483194Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.638926Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:22.674933Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:22.675042Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:22.701598Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:22.702562Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:22.702784Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:22.702844Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:22.702879Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:22.702922Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:22.702959Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:22.702996Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:22.703389Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:22.714623Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:22.714727Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:22.763660Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2189:2632] 2026-01-07T17:42:22.764269Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2189:2632], schemeshard id = 72075186224037897 2026-01-07T17:42:22.811896Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:22.814249Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:42:22.825171Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Describe result: PathErrorUnknown 2026-01-07T17:42:22.825246Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Creating table 2026-01-07T17:42:22.825330Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:42:22.830821Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:22.834011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:22.839405Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:22.839510Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:22.849351Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:23.011424Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:42:23.036453Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:23.287163Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:23.431240Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:23.431312Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2257:2668] Owner: [2:2256:2667]. Column diff is empty, finishing 2026-01-07T17:42:24.177854Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.575219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.575635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4415:3834], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.575686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.576698Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:4418:3837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:28.576850Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:28.577040Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:42:28.577087Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:4421:3840] 2026-01-07T17:42:28.577140Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:4421:3840] 2026-01-07T17:42:28.577830Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4422:3925] 2026-01-07T17:42:28.578049Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:4421:3840], server id = [2:4422:3925], tablet id = 72075186224037894, status = OK 2026-01-07T17:42:28.578223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4422:3925], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:42:28.578282Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:42:28.578505Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:42:28.578563Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:4418:3837], StatRequests.size() = 1 2026-01-07T17:42:28.593077Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:28.593560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4426:3844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.593717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.594093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4431:3849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.594151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.594311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4432:3850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:28.599475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:28.760194Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:42:28.760278Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:42:28.817110Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:4421:3840], schemeshard count = 1 2026-01-07T17:42:29.074765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4435:3853], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2026-01-07T17:42:29.255940Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:4546:3924] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:29.269639Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:4569:3940]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:29.269873Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:29.269908Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:4569:3940], StatRequests.size() = 1 2026-01-07T17:42:29.434075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:29.871483Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4912:4003]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:29.871780Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:42:29.872283Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2026-01-07T17:42:29.872346Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:42:29.872664Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:42:29.872748Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4912:4003], StatRequests.size() = 1 2026-01-07T17:42:29.891825Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:29.903062Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4921:4012]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:29.903228Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:42:29.903258Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4921:4012], StatRequests.size() = 1 2026-01-07T17:42:29.980696Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4964:4174]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:29.984118Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:29.984188Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:29.984646Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 
] 2026-01-07T17:42:29.984700Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:42:29.984773Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:29.996128Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:42:30.003713Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:42:30.004088Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-07T17:42:30.004330Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:42:30.004427Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:42:30.004775Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4992:4190]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:30.008296Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:30.008367Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:30.008995Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:30.009072Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:42:30.009123Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:30.009358Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:42:30.012654Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-07T17:42:30.013037Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-07T17:42:30.013792Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-07T17:42:30.013997Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TConsoleTests::TestRestartConsoleAndPools >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TraverseDatashard::TraverseOneTableServerless [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> FolderServiceTest::TFolderServiceAdapter >> FolderServiceTest::TFolderServiceTransitional >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES >> TConsoleConfigHelpersTests::TestConfigCourier >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 30493, MsgBus: 21984 2026-01-07T17:42:00.670434Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676344090864300:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:00.670493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:01.039562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.325227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.462900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:01.462996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:01.483630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:01.541167Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676344090864275:2081] 1767807720668802 != 
1767807720668805 2026-01-07T17:42:01.716193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:01.733858Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:01.764395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:01.764457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:02.348131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:02.348152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:02.348157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:02.348304Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:02.460009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:03.552510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:03.749369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.294931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.446522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:04.522454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:05.066301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7592676365565702652:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.071540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.091101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676365565702662:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.091180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:05.670664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676344090864300:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:05.670764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:06.789135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.865559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.921330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.960213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:06.992648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.049917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.107883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.184918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:07.351831Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374155638143:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.351938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374155638148:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676374155638149:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.352426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:07.401622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:07.416422Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676374155638152:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking ... 4775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];
[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] 2026-01-07T17:42:28.910931Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710774 2026-01-07T17:42:29.196061Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710775 2026-01-07T17:42:29.455164Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710776 /Root/Vectors: [[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[[
"sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: [["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> 
TJaegerTracingConfiguratorTests::RequestTypeSampler |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:22.021644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:22.081721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:22.086371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:22.086580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:22.086663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:22.318513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:22.397066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:22.397161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:22.430551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.492201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:23.066124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:23.066848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:23.066879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:23.066922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:23.067051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:23.127707Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:23.614335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:26.135463Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:26.141698Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:26.144818Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:26.168747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:26.168835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.207188Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:26.209147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:26.408718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:26.408840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.410562Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.411306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.412003Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.413044Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.413278Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.413665Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.413850Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.414017Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.414275Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.430755Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:26.670631Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:26.698638Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:26.698726Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:26.740012Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:26.741367Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:26.741656Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:26.741718Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:26.741784Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:26.741842Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:26.741891Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:26.741951Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:26.742611Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:26.745030Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:26.747813Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:42:26.800234Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:26.800292Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:26.800388Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:42:26.807744Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:26.807867Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:26.837179Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:26.837605Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:26.840593Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:26.877956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:26.899271Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:26.899409Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:26.913347Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:26.991650Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:42:27.110280Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:27.484749Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:27.594790Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:27.594906Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:28.366837Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:28.404235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:29.037572Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:29.144255Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8275: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2026-01-07T17:42:29.144309Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8291: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2026-01-07T17:42:29.144380Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:3092:3137], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2026-01-07T17:42:29.145744Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3093:3138] 2026-01-07T17:42:29.145884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3093:3138], schemeshard id = 72075186224037899 2026-01-07T17:42:30.513049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3414:3570], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:30.513327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:30.514052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3432:3575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:30.514149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:30.536079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:30.924009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3714:3621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:30.924186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.006965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3717:3623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.007163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.008577Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3721:3627]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:31.008808Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:31.009052Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:42:31.009137Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3724:3630] 2026-01-07T17:42:31.009222Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3724:3630] 2026-01-07T17:42:31.009995Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3725:3544] 2026-01-07T17:42:31.010321Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3724:3630], server id = [2:3725:3544], tablet id = 72075186224037894, status = OK 2026-01-07T17:42:31.010648Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3725:3544], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:42:31.010730Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:42:31.010976Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:42:31.011055Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3721:3627], StatRequests.size() = 1 2026-01-07T17:42:31.059805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:31.060577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3729:3634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.060740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.061476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3733:3638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.061574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.061671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3736:3641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:31.069372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:31.187379Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:42:31.187492Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:42:31.219495Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3724:3630], schemeshard count = 1 2026-01-07T17:42:31.562684Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3738:3643], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:42:31.884888Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3856:3716] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:31.898899Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3879:3732]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:31.899132Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:31.899181Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3879:3732], StatRequests.size() = 1 2026-01-07T17:42:32.010019Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3920:3589]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:32.012727Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:32.012792Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:32.013416Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:32.013462Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:42:32.013510Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:32.023234Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:42:32.029481Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:42:32.029761Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-07T17:42:32.029874Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:42:32.030005Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestValidation >> KqpQueryPerf::Delete+QueryService-UseSink >> TUserAccountServiceTest::Get >> TAccessServiceTest::Authenticate |89.6%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService >> TAccessServiceTest::PassRequestId >> DataStreams::TestDeleteStream >> TServiceAccountServiceTest::Get [GOOD] >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [GOOD] >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> DataStreams::TestUpdateStorage >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> DataStreams::TestStreamStorageRetention >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2026-01-07T17:42:33.499923Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676485885803465:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:33.500134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:33.704096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:33.711450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:33.711559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:33.728812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:33.816150Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676485885803344:2081] 1767807753494553 != 1767807753494556 2026-01-07T17:42:33.820700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.958502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:34.011116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.030618Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cfb0b079a50] Connect to grpc://localhost:8885 2026-01-07T17:42:34.031459Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cfb0b079a50] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-07T17:42:34.041268Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cfb0b079a50] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2026-01-07T17:42:34.042106Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cfb0b00e750] Connect to grpc://localhost:16344 2026-01-07T17:42:34.042641Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cfb0b00e750] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-07T17:42:34.048648Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cfb0b00e750] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2026-01-07T17:42:34.049089Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cfb0b00e750] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-07T17:42:34.050850Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cfb0b00e750] Status 5 Not Found 2026-01-07T17:42:34.051276Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cfb0b079a50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-07T17:42:34.053355Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cfb0b079a50] Status 5 Not Found |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> 
TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2026-01-07T17:42:33.766323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676487722546055:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:33.766370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:34.005252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.005373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.032157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.059183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:34.110883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.111859Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676487722546027:2081] 1767807753764777 != 1767807753764780 2026-01-07T17:42:34.312244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.354821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:34.381973Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdaf3c8edd0] Connect to grpc://localhost:30108 2026-01-07T17:42:34.428758Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdaf3c8edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-07T17:42:34.438483Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdaf3c8edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30108: Failed to connect to remote host: Connection refused 2026-01-07T17:42:34.439998Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdaf3c8edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-07T17:42:34.440728Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdaf3c8edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30108: Failed to connect to remote host: Connection refused 2026-01-07T17:42:34.783635Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:35.441281Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdaf3c8edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-07T17:42:35.447824Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdaf3c8edd0] Status 5 Not Found 2026-01-07T17:42:35.448421Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdaf3c8edd0] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-07T17:42:35.451422Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdaf3c8edd0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TUserAccountServiceTest::Get [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TAccessServiceTest::Authenticate [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest >> TAccessServiceTest::PassRequestId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2026-01-07T17:42:35.669568Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676494555147213:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:35.669641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:35.901599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:35.903696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:35.903831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:35.908276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.043709Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7592676494555147173:2081] 1767807755668604 != 1767807755668607 2026-01-07T17:42:36.048564Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.129626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.255206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] >> TDataShardLocksTest::Points_OneTx >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-07T17:42:35.805298Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676494556876084:2107];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:35.805424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:35.851547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:36.132735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.132839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.133014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:36.181282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.250395Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676494556876014:2081] 1767807755801263 != 1767807755801266 2026-01-07T17:42:36.251917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.416030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.487575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.550794Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3bb008edd0] Connect to grpc://localhost:10793 2026-01-07T17:42:36.552116Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3bb008edd0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2026-01-07T17:42:36.573301Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3bb008edd0] Status 7 Permission Denied 2026-01-07T17:42:36.575484Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3bb008edd0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2026-01-07T17:42:36.577425Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3bb008edd0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2026-01-07T17:42:36.143729Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676497894187289:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:36.146478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:36.391260Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:36.398322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.398460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.401439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.547061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.667383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.796547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.805182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:36.813028Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3fb569ed50] Connect to grpc://localhost:23356 2026-01-07T17:42:36.884525Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3fb569ed50] Request 
ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-07T17:42:36.902190Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3fb569ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23356: Failed to connect to remote host: Connection refused 2026-01-07T17:42:36.903997Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3fb569ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-07T17:42:36.904604Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3fb569ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23356: Failed to connect to remote host: Connection refused 2026-01-07T17:42:37.149117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:37.905471Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3fb569ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-07T17:42:37.920131Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3fb569ed50] Status 5 Not Found 2026-01-07T17:42:37.921594Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3fb569ed50] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-07T17:42:37.925169Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3fb569ed50] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> DataStreams::TestControlPlaneAndMeteringData ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-07T17:42:36.272145Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676499641021623:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:36.272248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:36.534038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.534156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.538581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.580188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:36.649854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676499641021435:2081] 1767807756177404 != 1767807756177407 2026-01-07T17:42:36.651773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.836047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.849084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.849279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:36.914806Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce4a8c9ed50]{trololo} Connect to grpc://localhost:6992 2026-01-07T17:42:36.916059Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce4a8c9ed50]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-07T17:42:36.946529Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce4a8c9ed50]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> DataStreams::TestGetShardIterator >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> DataStreams::TestPutRecordsOfAnauthorizedUser >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> DataStreams::TestUpdateStream >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |89.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestNonChargeableUser |89.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:42:09.219230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:09.324694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:09.333825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:09.334243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:09.334406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:09.758426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:09.856553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:09.856666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:09.891243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:09.981763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:10.630111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:10.631159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:10.631208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:10.631241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:10.631764Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:10.700799Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:11.237726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:14.429084Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:14.438100Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:14.442468Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:14.476890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:14.477040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:14.516521Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:14.518087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:14.699395Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:14.699538Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:14.701166Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.701767Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.702390Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.703249Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.703844Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.704048Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.704374Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.704603Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.704719Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.720914Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:14.875215Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:14.940939Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:14.941065Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:14.967681Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:14.968691Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:14.968860Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:14.968901Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:14.968967Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:14.969008Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:14.969061Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:14.969104Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:14.969709Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:14.980435Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:14.980520Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:15.003198Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:15.003791Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:15.050113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:15.051234Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:15.064623Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:15.064680Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:15.064760Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:15.071037Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:15.074556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:15.081071Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:15.081190Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:15.091938Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:15.154769Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:15.311302Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:15.620267Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:15.767807Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:15.767888Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:16.512319Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... :42:18.359438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:18.485610Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3178:3264] 2026-01-07T17:42:18.487887Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3176:3469] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-07T17:42:18.487939Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-07T17:42:18.487978Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-07T17:42:18.563007Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-07T17:42:18.586506Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037899 not found 2026-01-07T17:42:18.587155Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037900 not found 2026-01-07T17:42:18.587726Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037902 not found 2026-01-07T17:42:18.587800Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037901 not found 2026-01-07T17:42:18.672277Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:42:18.672344Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:42:18.747526Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3080:3445], schemeshard count = 1 2026-01-07T17:42:21.007974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:42:21.008312Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 1 2026-01-07T17:42:21.008521Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-07T17:42:22.737298Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:42:24.019442Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:42:24.019906Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 2 2026-01-07T17:42:24.020074Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 2 2026-01-07T17:42:25.727604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:42:25.760123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:42:25.760200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:27.296813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:42:27.296882Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:27.391178Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:42:27.391495Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 3 2026-01-07T17:42:27.391671Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-07T17:42:29.944863Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:42:31.605566Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), 
node count = 1, schemeshard count = 1 2026-01-07T17:42:31.605954Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2026-01-07T17:42:31.606295Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-07T17:42:34.055108Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:42:35.779657Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:42:35.780108Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:42:35.780333Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:42:38.525288Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:42:38.671399Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:42:38.671481Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:42:38.671523Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:42:38.671566Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:42:40.841583Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:42:40.841915Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-07T17:42:40.842119Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:42:40.853810Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:42:40.853900Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:42:40.854153Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:42:40.870333Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:42:40.903597Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:42:40.903756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:667: [72075186224037894] ScheduleNextAnalyze. table [OwnerId: 72075186224037897, LocalPathId: 34] was deleted, deleting its statistics 2026-01-07T17:42:40.904231Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4081:3606], ActorId: [2:4082:3607], Starting query actor #1 [2:4083:3608] 2026-01-07T17:42:40.904321Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4082:3607], ActorId: [2:4083:3608], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:42:40.908090Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4082:3607], ActorId: [2:4083:3608], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2EwMmI3ZDYtOGZhYTM5MzktMTJiMzA4NjAtZWE2Yzc2NmM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-07T17:42:41.022622Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4092:3617]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:41.022897Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:41.023003Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4094:3619] 2026-01-07T17:42:41.023072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4094:3619] 2026-01-07T17:42:41.023469Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4095:3620] 2026-01-07T17:42:41.023583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4094:3619], server id = [2:4095:3620], tablet id = 72075186224037894, status = OK 2026-01-07T17:42:41.023677Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4095:3620], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:42:41.023737Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:42:41.023834Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:42:41.023908Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4092:3617], StatRequests.size() = 1 2026-01-07T17:42:41.024026Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:41.207331Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4082:3607], ActorId: [2:4083:3608], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2EwMmI3ZDYtOGZhYTM5MzktMTJiMzA4NjAtZWE2Yzc2NmM=, TxId: 2026-01-07T17:42:41.207437Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4082:3607], ActorId: [2:4083:3608], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2EwMmI3ZDYtOGZhYTM5MzktMTJiMzA4NjAtZWE2Yzc2NmM=, TxId: 2026-01-07T17:42:41.207837Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4081:3606], ActorId: [2:4082:3607], Got response [2:4083:3608] SUCCESS 2026-01-07T17:42:41.208194Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:42:41.225891Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:42:41.225980Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3176:3469] 2026-01-07T17:42:41.226686Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4115:3632]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:41.230056Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:41.230123Z node 2 :STATISTICS ERROR: service_impl.cpp:799: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2026-01-07T17:42:41.230174Z node 2 :STATISTICS DEBUG: service_impl.cpp:1329: ReplyFailed(), request id = 2 |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> DataStreams::TestReservedResourcesMetering >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> DataStreams::TestGetRecordsStreamWithSingleShard >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2026-01-07T17:42:35.762849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676493394782802:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:35.763421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:36.083599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.083708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.131318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:36.156946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.157418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.161085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676493394782681:2081] 1767807755756266 != 1767807755756269 2026-01-07T17:42:36.411244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.416239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:39.799693Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:39.799936Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:39.810018Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:39.839965Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:39.840065Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.929734Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.117161Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:40.357557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:40.369506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:40.611799Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2026-01-07T17:42:36.524063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676498915107138:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:36.529561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:36.783081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:36.789148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.789292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.840192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:42:36.875074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.881049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676498915107031:2081] 1767807756510509 != 1767807756510512 2026-01-07T17:42:37.017103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:37.068120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:37.080735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:40.463657Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676515962603900:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:40.463861Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:40.490860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:40.577464Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.578339Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.578411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.579196Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676515962603856:2081] 1767807760461270 != 1767807760461273 2026-01-07T17:42:40.615447Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.726474Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:40.818010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |89.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> 
TConsoleConfigTests::TestAddConfigItem |89.6%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestTimeRetention >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |89.6%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> KqpQueryPerf::Update+QueryService+UseSink >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2026-01-07T17:42:33.391193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:33.391260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.603187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.900210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:34.900280Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.992804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.143787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:36.143858Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.226165Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:37.421728Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:37.421814Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:37.560489Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:38.994071Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:38.994148Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:39.098297Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:40.701867Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:40.701953Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.789927Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:42.172493Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:42.172578Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:42.282943Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) E0107 17:42:43.786114143 264607 trace.cc:67] Unknown trace var: 'sdk_authz' 2026-01-07T17:42:43.786788Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CONFIGS has been changed from WARN to NOTICE 2026-01-07T17:42:43.786888Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CONFIGS has been changed from WARN to DEBUG 2026-01-07T17:42:43.786937Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CLUSTER has been changed from WARN to NOTICE 2026-01-07T17:42:43.786967Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CLUSTER has been changed from WARN to DEBUG 2026-01-07T17:42:43.786996Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_PROXY has been changed from WARN to NOTICE 2026-01-07T17:42:43.787027Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_PROXY has been changed from WARN to DEBUG 2026-01-07T17:42:43.787055Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_WORKER has been changed from WARN to NOTICE 2026-01-07T17:42:43.787086Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_WORKER has been changed from WARN to DEBUG 2026-01-07T17:42:43.787115Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_GATEWAY has been changed from WARN to NOTICE 2026-01-07T17:42:43.787148Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_GATEWAY has been changed from WARN to DEBUG 2026-01-07T17:42:43.787176Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_YQL has been changed from WARN to NOTICE 2026-01-07T17:42:43.787207Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_YQL has been changed from WARN to DEBUG 2026-01-07T17:42:43.787234Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_EXECUTER has been changed from WARN to NOTICE 2026-01-07T17:42:43.787266Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_EXECUTER has been changed from WARN to DEBUG 2026-01-07T17:42:43.787293Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPUTE has been changed from WARN to NOTICE 2026-01-07T17:42:43.787323Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPUTE has been changed from WARN to DEBUG 2026-01-07T17:42:43.787354Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SLOW_LOG has been changed from WARN to NOTICE 
2026-01-07T17:42:43.787383Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SLOW_LOG has been changed from WARN to DEBUG 2026-01-07T17:42:43.787411Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_SERVICE has been changed from WARN to NOTICE 2026-01-07T17:42:43.787441Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_SERVICE has been changed from WARN to DEBUG 2026-01-07T17:42:43.787471Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_ACTOR has been changed from WARN to NOTICE 2026-01-07T17:42:43.787503Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_ACTOR has been changed from WARN to DEBUG 2026-01-07T17:42:43.787534Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to NOTICE 2026-01-07T17:42:43.787562Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to DEBUG 2026-01-07T17:42:43.787589Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to NOTICE 2026-01-07T17:42:43.787617Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to DEBUG 2026-01-07T17:42:43.787646Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_BLOBS_STORAGE has been changed from WARN to NOTICE 2026-01-07T17:42:43.787676Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_BLOBS_STORAGE has been changed from WARN to DEBUG 2026-01-07T17:42:43.787707Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_REQUEST has been changed from WARN to NOTICE 2026-01-07T17:42:43.787735Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_REQUEST has been changed from WARN to DEBUG 2026-01-07T17:42:43.787765Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_NODE has been changed from WARN to NOTICE 2026-01-07T17:42:43.787795Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_NODE has been changed from WARN to DEBUG 2026-01-07T17:42:43.787824Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_LOAD_TEST has been changed from WARN to NOTICE 2026-01-07T17:42:43.787854Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_LOAD_TEST has been changed from WARN to DEBUG 2026-01-07T17:42:43.787882Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component KQP_SESSION has been changed from WARN to NOTICE 2026-01-07T17:42:43.787914Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SESSION has been changed from WARN to DEBUG 2026-01-07T17:42:43.787950Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to NOTICE 2026-01-07T17:42:43.787982Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to DEBUG 2026-01-07T17:42:43.788009Z node 8 :CMS_CONFIGS NOTICE: log_settings_con ... 026-01-07T17:42:48.271600Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from WARN to ALERT 2026-01-07T17:42:48.271627Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2026-01-07T17:42:48.271656Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-07T17:42:48.271686Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-07T17:42:48.271716Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2026-01-07T17:42:48.271743Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-07T17:42:48.271776Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-07T17:42:48.271802Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2026-01-07T17:42:48.271833Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-07T17:42:48.271862Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-07T17:42:48.271887Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2026-01-07T17:42:48.271914Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-07T17:42:48.271942Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-07T17:42:48.271969Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: 
TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_BRIDGE has been changed from 0 to 10 2026-01-07T17:42:48.271997Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-07T17:42:48.272028Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-07T17:42:48.272056Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_CLUSTER_BALANCING has been changed from 0 to 10 2026-01-07T17:42:48.272084Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-07T17:42:48.272112Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-07T17:42:48.272137Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_BRIDGE_SYNC has been changed from 0 to 10 2026-01-07T17:42:48.272164Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-07T17:42:48.272193Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-07T17:42:48.272219Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PHANTOM_FLAG_STORAGE has been changed from 0 to 10 2026-01-07T17:42:48.272248Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-07T17:42:48.272275Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-07T17:42:48.272301Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2026-01-07T17:42:48.272330Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-07T17:42:48.272358Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-07T17:42:48.272387Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SASL_AUTH has been changed from 0 to 10 2026-01-07T17:42:48.272416Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-01-07T17:42:48.272446Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 
2026-01-07T17:42:48.272469Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2026-01-07T17:42:48.272499Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-07T17:42:48.272525Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-07T17:42:48.272551Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2026-01-07T17:42:48.272586Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-07T17:42:48.272619Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-07T17:42:48.272644Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2026-01-07T17:42:48.272675Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-07T17:42:48.272706Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-07T17:42:48.272735Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2026-01-07T17:42:48.272764Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-07T17:42:48.272792Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-07T17:42:48.272820Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2026-01-07T17:42:48.272920Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-07T17:42:48.273089Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-07T17:42:48.273119Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BRIDGE has been changed from 0 to 10 2026-01-07T17:42:48.273149Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TRANSFER has been changed from WARN to ALERT 2026-01-07T17:42:48.273180Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TRANSFER has been changed from WARN to ALERT 
2026-01-07T17:42:48.273208Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TRANSFER has been changed from 0 to 10 2026-01-07T17:42:48.273239Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-07T17:42:48.273266Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-07T17:42:48.273292Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TOKEN_MANAGER has been changed from 0 to 10 2026-01-07T17:42:48.273340Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-07T17:42:48.273385Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-07T17:42:48.273413Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LOCAL_DB_BACKUP has been changed from 0 to 10 2026-01-07T17:42:48.273443Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-07T17:42:48.273472Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-07T17:42:48.273499Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SCHEMA_SECRET_CACHE has been changed from 0 to 10 2026-01-07T17:42:48.273615Z node 11 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ... 
waiting for config update (done) |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2026-01-07T17:40:59.443077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676085263041033:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:59.443577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:59.667039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:59.672898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:59.673004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:59.710614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:59.770034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:59.771282Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676085263041005:2081] 1767807659441593 != 1767807659441596 2026-01-07T17:40:59.823261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:59.823288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:59.823297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:59.823393Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:59.866820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:41:00.018257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:00.450318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:02.188505Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: 
[WorkloadService] [Service] Started workload service initialization 2026-01-07T17:41:02.188989Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676098147943746:2319], Start check tables existence, number paths: 2 2026-01-07T17:41:02.190127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:41:02.190168Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:41:02.193875Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NDM5ZWUzMC1mYTQzYTlmYS03MzU4YTI5OC1iMmU4YzJlYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NDM5ZWUzMC1mYTQzYTlmYS03MzU4YTI5OC1iMmU4YzJlYQ== temp_dir_name# 798f6dd7-400d-51d0-2bce-328a02540c6d trace_id# 2026-01-07T17:41:02.194562Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676098147943746:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:41:02.194604Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676098147943746:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:41:02.194626Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676098147943746:2319], Successfully finished 2026-01-07T17:41:02.204890Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:41:02.205064Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NDM5ZWUzMC1mYTQzYTlmYS03MzU4YTI5OC1iMmU4YzJlYQ==, ActorId: [1:7592676098147943764:2322], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:02.210388Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:41:02.225065Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:41:02.229843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:02.233353Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:41:02.235714Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:41:02.247102Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, 
Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:41:02.334288Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:41:02.386798Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676098147943827:2528] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:02.386954Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676098147943776:2496], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:41:02.389451Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-07T17:41:02.389483Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-07T17:41:02.389572Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943834:2325], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:41:02.390964Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943834:2325], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:41:02.391035Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-07T17:41:02.391052Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-07T17:41:02.391263Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7592676098147943843:2326], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-07T17:41:02.392388Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7592676098147943843:2326], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-07T17:41:02.406215Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:41:02.406247Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:41:02.406277Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:41:02.406295Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943855:2328], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:41:02.406463Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: 
ydb://session/3?node_id=1&id=NDM5ZWUzMC1mYTQzYTlmYS03MzU4YTI5OC1iMmU4YzJlYQ==, ActorId: [1:7592676098147943764:2322], ActorState: ReadyState, LegacyTraceId: 01kecrrea5fb24pdrf7c949ya4, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-07T17:41:02.412221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943855:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:02.412347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:02.412418Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:41:02.412457Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943865:2329], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:41:02.412907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676098147943865:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:02.412977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch poo ... GU0MzBhMWU=, TxId: 2026-01-07T17:42:45.056584Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7592676538946057090:2866], ActorId: [10:7592676538946057091:2867], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery with SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, TxId: , text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2026-01-07T17:42:45.057096Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ReadyState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Received request proxy_request_id# 29 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpc_actor# [10:7592676538946057124:2527] database# /Root database_id# /Root pool_id# trace_id# 2026-01-07T17:42:45.058208Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Acquire mvcc snapshot trace_id# 2026-01-07T17:42:45.062761Z node 10 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Read snapshot result status# UNAVAILABLE step# 1767807765101 tx_id# 18446744073709551615 trace_id# 2026-01-07T17:42:45.062844Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 136336318207512 trace_id# 2026-01-07T17:42:45.062927Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: 
[10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:42:45.063103Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Created new KQP executer executer_id# [10:7592676538946057133:2520] is_rollback# false trace_id# 2026-01-07T17:42:45.070552Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, TEvTxResponse current_tx# 1 transactions_size# 2 status# SUCCESS trace_id# 2026-01-07T17:42:45.070647Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, ExecutePhyTx literal# true commit# true deferred_effects_size# 0 tx# 136336319033688 trace_id# 2026-01-07T17:42:45.071634Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, TEvTxResponse current_tx# 2 transactions_size# 2 status# SUCCESS trace_id# 2026-01-07T17:42:45.071793Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, TxInfo status# Committed kind# ReadOnly total_duration# 13.695 server_duration# 13.578 queries_count# 2 trace_id# 2026-01-07T17:42:45.071930Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:42:45.072008Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:42:45.072047Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, EndCleanup is_final# false trace_id# 2026-01-07T17:42:45.072107Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ExecuteState, LegacyTraceId: 01kecrvjj17p20r6t6g06tdmhf, Sent query response back to proxy proxy_request_id# 29 
proxy_id# [10:7592676440161807868:2265] trace_id# 2026-01-07T17:42:45.073905Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7592676538946057090:2866], ActorId: [10:7592676538946057091:2867], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, DataQuery #2 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, TxId: 2026-01-07T17:42:45.074010Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7592676538946057090:2866], ActorId: [10:7592676538946057091:2867], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, TxId: 2026-01-07T17:42:45.074049Z node 10 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7592676538946057090:2866], ActorId: [10:7592676538946057091:2867], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Delete session: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU= 2026-01-07T17:42:45.074140Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TRefreshPoolStateQuery] OwnerId: [10:7592676538946057089:2865], ActorId: [10:7592676538946057090:2866], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Got response [10:7592676538946057091:2867] SUCCESS 2026-01-07T17:42:45.080155Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:42:45.080200Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:42:45.080228Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:42:45.080253Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:42:45.080321Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=Mzk0MjYxYWEtMTI4YzgzMjMtN2JlMjc2YTItZGU0MzBhMWU=, ActorId: [10:7592676538946057093:2520], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:42:45.090256Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=ZmQxMTkyMjMtNTUxZTVlMGItMzZmNjQyZTUtODkzMTdkZmE=, ActorId: [10:7592676457341677671:2325], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:42:45.090334Z node 10 :KQP_SESSION INFO: 
{KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=ZmQxMTkyMjMtNTUxZTVlMGItMzZmNjQyZTUtODkzMTdkZmE=, ActorId: [10:7592676457341677671:2325], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:42:45.090371Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=ZmQxMTkyMjMtNTUxZTVlMGItMzZmNjQyZTUtODkzMTdkZmE=, ActorId: [10:7592676457341677671:2325], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:42:45.090406Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=ZmQxMTkyMjMtNTUxZTVlMGItMzZmNjQyZTUtODkzMTdkZmE=, ActorId: [10:7592676457341677671:2325], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:42:45.090495Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=ZmQxMTkyMjMtNTUxZTVlMGItMzZmNjQyZTUtODkzMTdkZmE=, ActorId: [10:7592676457341677671:2325], ActorState: unknown state, Session actor destroyed trace_id# |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:33.714442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:33.792700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:33.798011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:33.798246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:33.798358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.175009Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.263196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.263304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.305761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.418291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.148459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.149774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.149839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.149877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.150477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:35.218476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:35.866042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:39.280864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:39.293396Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:39.299371Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:39.343430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:39.343545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.386746Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:39.390248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:39.598113Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:39.598290Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.600047Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.608293Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.609165Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.610252Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.610891Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.611100Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.611346Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.611646Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.611821Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:39.629539Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.023790Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.081325Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:40.081461Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:40.137723Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:40.139010Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:40.139210Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:40.139265Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:40.139321Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:40.139373Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:40.139416Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:40.139461Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:40.140291Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:40.154247Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.154401Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.185090Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:40.186057Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:40.254715Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:40.256342Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:42:40.272293Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:40.272371Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:40.272457Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:42:40.280245Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:40.284436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.292280Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:40.292433Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:40.305850Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:40.527095Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:42:40.541964Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:40.965908Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:41.127067Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:41.127156Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:42.005685Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ror: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:44.342905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:45.169857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3707:3618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.170215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.171400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3711:3621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.171554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.174380Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3714:3624]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:45.174701Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:45.174997Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:42:45.175082Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3717:3627] 2026-01-07T17:42:45.175172Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3717:3627] 2026-01-07T17:42:45.176168Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3718:3539] 2026-01-07T17:42:45.176547Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3717:3627], server id = [2:3718:3539], tablet id = 72075186224037894, status = OK 2026-01-07T17:42:45.176917Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3718:3539], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:42:45.177411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:42:45.177790Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:42:45.177891Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3714:3624], StatRequests.size() = 1 2026-01-07T17:42:45.224622Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:45.228196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3722:3631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.228539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.229578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3725:3634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.229844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.230221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3729:3638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:45.253401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:45.424201Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:42:45.424438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:42:45.469611Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3717:3627], schemeshard count = 1 2026-01-07T17:42:45.815816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3731:3640], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:42:46.314652Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3840:3704] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:46.359213Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3863:3720]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:46.359455Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:46.359498Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3863:3720], StatRequests.size() = 1 2026-01-07T17:42:46.708274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:47.684908Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4203:3792]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:47.685249Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:42:47.685308Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4203:3792], StatRequests.size() = 1 2026-01-07T17:42:47.828397Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4212:3801]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:47.828708Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:42:47.828757Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4212:3801], StatRequests.size() = 1 2026-01-07T17:42:48.007611Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4254:3789]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:48.011362Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:48.011443Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:48.012194Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:42:48.012263Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:42:48.012319Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:48.025732Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-07T17:42:48.034900Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:42:48.035227Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-07T17:42:48.035522Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:42:48.035649Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-07T17:42:48.036005Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4282:3805]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:42:48.038769Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:48.038825Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:48.039513Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:48.039589Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:42:48.039750Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:48.040045Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:42:48.043593Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-07T17:42:48.044097Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-07T17:42:48.045127Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-07T17:42:48.045526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |89.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> KqpQueryPerf::ComputeLength+QueryService >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> 
TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> KqpWorkload::STOCK >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2262, MsgBus: 20579 2026-01-07T17:42:35.645163Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676496188027003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:35.645505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:35.939435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:35.939583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:35.980960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:35.999338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.056192Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676496188026899:2081] 1767807755637745 != 1767807755637748 2026-01-07T17:42:36.062833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.190438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:36.190471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:36.190478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:36.190546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:36.213564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:36.674600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:36.697375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.761287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:36.990112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:37.201155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:37.282621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:39.362020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676513367897952:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:39.362172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:39.362723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676513367897962:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:39.362797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:39.791267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:39.854596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:39.912058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.002204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.092455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.164026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.232857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.335706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.466429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676517662866127:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:40.466563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:40.467064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676517662866132:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:40.467108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676517662866133:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:40.467230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:40.472058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:40.495090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676517662866136:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:40.594413Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676517662866187:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:40.649357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676496188027003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:40.649502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23171, MsgBus: 10232 2026-01-07T17:42:44.062502Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676533073786643:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:44.062562Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:44.161031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:44.249020Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676533073786518:2081] 1767807764037788 != 1767807764037791 2026-01-07T17:42:44.258199Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.261222Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:44.261294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:44.293911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:44.461324Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:44.533653Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:44.533676Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:44.533684Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:44.533774Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:45.141098Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:45.593653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:45.620791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:45.702701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:45.921012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:46.083076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.482299Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676550253657580:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:48.482412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:48.482781Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676550253657590:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:48.482827Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:48.584977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.629326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.669099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.709449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.753242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.821241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.900448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.997124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:49.065631Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592676533073786643:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:49.065905Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:49.149985Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676554548625753:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:49.150116Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:49.150991Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676554548625759:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:49.151043Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676554548625760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:49.151152Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:49.155702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:49.177754Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676554548625763:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:49.237462Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676554548625816:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TFlatTest::SplitEmptyTwice [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo |89.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> KqpQueryPerf::KvRead+QueryService >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified |89.7%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigTests::TestAutoKind >> TDataShardLocksTest::UseLocksCache [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> TConsoleTests::TestRemoveTenant >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> DataStreams::TestUnsupported [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleTests::TestCreateTenantAlreadyExists >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> DataStreams::TestShardPagination [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TConsoleConfigTests::TestAllowedScopes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2026-01-07T17:39:47.341780Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675774789988134:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:47.346030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:47.601037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-07T17:39:47.627320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:47.627403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:47.733783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:47.741332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:47.742261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675774789988074:2081] 1767807587337016 != 1767807587337019 2026-01-07T17:39:47.916053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:39:48.203860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:48.233021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:39:48.350823Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:48.380716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:39:48.712638Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:39:48.729580Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:39:48.823845Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:39:48.834349Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:39:52.345148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675774789988134:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:52.345216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:52.676174Z node 1 :OPS_COMPACT INFO: 
Compact{72057594037968897.2.82, eph 1} end=Done, 2 blobs 229r (max 229), put Spent{time=0.043s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (66421 0 0)b }, ecr=1.000 2026-01-07T17:39:52.733502Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.87, eph 1} end=Done, 2 blobs 765r (max 765), put Spent{time=0.061s,wait=0.010s,interrupts=1} Part{ 1 pk, lobs 0 +0, (50257 0 0)b }, ecr=1.000 2026-01-07T17:39:52.802473Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.589, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-07T17:39:53.097120Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.162, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.012s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-07T17:39:53.107735Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.163, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.018s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-07T17:39:53.120401Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.151, eph 2} end=Done, 2 blobs 465r (max 474), put Spent{time=0.095s,wait=0.028s,interrupts=1} Part{ 1 pk, lobs 0 +0, (134603 0 0)b }, ecr=1.000 2026-01-07T17:39:53.124657Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.165, eph 1} end=Done, 2 blobs 501r (max 501), put Spent{time=0.034s,wait=0.024s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2026-01-07T17:39:53.189595Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.166, eph 1} end=Done, 2 blobs 1500r (max 1500), put Spent{time=0.097s,wait=0.069s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2026-01-07T17:39:53.358878Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.171, eph 2} end=Done, 2 blobs 1533r (max 1536), put Spent{time=0.251s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (100555 0 0)b }, ecr=1.000 2026-01-07T17:39:53.392023Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.729, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.266s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-07T17:39:53.613752Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.243, eph 3} end=Done, 2 blobs 721r (max 721), put Spent{time=0.086s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208570 0 0)b }, ecr=1.000 2026-01-07T17:39:53.662593Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.256, eph 3} end=Done, 2 blobs 2286r (max 2289), put Spent{time=0.046s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (149824 0 0)b }, ecr=1.000 2026-01-07T17:39:53.807674Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1097, eph 2} end=Done, 2 blobs 3r (max 5), put Spent{time=0.010s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-07T17:39:53.993807Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.320, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.022s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-07T17:39:53.995001Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.321, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.023s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-07T17:39:54.012086Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.311, eph 4} end=Done, 2 blobs 973r (max 974), put Spent{time=0.071s,wait=0.014s,interrupts=1} Part{ 1 pk, lobs 0 +0, (281344 0 0)b }, ecr=1.000 2026-01-07T17:39:54.028125Z 
node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.323, eph 2} end=Done, 2 blobs 1003r (max 1003), put Spent{time=0.056s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63792 0 0)b }, ecr=1.000 2026-01-07T17:39:54.129113Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.322, eph 2} end=Done, 2 blobs 3006r (max 3006), put Spent{time=0.157s,wait=0.014s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206360 0 0)b }, ecr=1.000 2026-01-07T17:39:54.225699Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.330, eph 4} end=Done, 2 blobs 3081r (max 3084), put Spent{time=0.189s,wait=0.010s,interrupts=1} Part{ 1 pk, lobs 0 +0, (201931 0 0)b }, ecr=1.000 2026-01-07T17:39:54.240176Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1246, eph 2} end=Done, 2 blobs 10001r (max 10511), put Spent{time=0.226s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-07T17:39:54.348042Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.380, eph 5} end=Done, 2 blobs 1224r (max 1225), put Spent{time=0.071s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (353831 0 0)b }, ecr=1.000 2026-01-07T17:39:54.413349Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.397, eph 5} end=Done, 2 blobs 3834r (max 3837), put Spent{time=0.108s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (251254 0 0)b }, ecr=1.000 2026-01-07T17:39:54.433747Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1605, eph 3} end=Done, 2 blobs 3r (max 5), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-07T17:39:54.536884Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.460, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.022s,wait=0.021s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-07T17:39:54.548140Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.450, eph 6} end=Done, 2 blobs 1483r (max 1484), put Spent{time=0.041s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (428659 0 0)b }, ecr=1.000 2026-01-07T17:39:54.549206Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.459, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.035s,wait=0.012s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-07T17:39:54.584807Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.461, eph 3} end=Done, 2 blobs 1504r (max 1504), put Spent{time=0.070s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95605 0 0)b }, ecr=1.000 2026-01-07T17:39:54.625416Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.457, eph 3} end=Done, 2 blobs 4506r (max 4506), put Spent{time=0.113s,wait=0.038s,interrupts=1} Part{ 1 pk, lobs 0 +0, (309242 0 0)b }, ecr=1.000 2026-01-07T17:39:54.655899Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.466, eph 6} end=Done, 2 blobs 4590r (max 4593), put Spent{time=0.105s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (300772 0 0)b }, ecr=1.000 2026-01-07T17:39:54.660516Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1754, eph 3} end=Done, 2 blobs 10001r (max 10503), put Spent{time=0.110s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-07T17:39:54.736765Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.507, eph 7} end=Done, 2 blobs 1739r (max 1740), put Spent{time=0.058s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (502626 0 0)b }, ecr=1.000 2026-01-07T17:39:54.742798Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.514, eph 7} end=Done, 2 blobs 5343r (max 5346), put Spent{time=0.041s,wait=0.001s,interrupts=1} Part{ 
1 pk, lobs 0 +0, (350095 0 0)b }, ecr=1.000 2026-01-07T17:39:54.799661Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.2121, eph 4} end=Done, 2 blobs 3r (max 5), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-07T17:39:54.889384Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.572, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.010s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-07T17:39:54.933338Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.571, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.053s,wait=0.018s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0 ... SHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:42:49.613483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592676557720942374 RawX2: 4503608217307454 } TabletId: 72075186224037893 State: 4 2026-01-07T17:42:49.613515Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:42:49.613660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592676557720942375 RawX2: 4503608217307455 } TabletId: 72075186224037894 State: 4 2026-01-07T17:42:49.613720Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:42:49.613887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592676553425974566 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-07T17:42:49.613912Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:42:49.614034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592676557720942372 RawX2: 4503608217307453 } TabletId: 72075186224037895 State: 4 2026-01-07T17:42:49.614058Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:42:49.614228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.614357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614371Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.614425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.614480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.614533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.614606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:42:49.614630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:42:49.615981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-07T17:42:49.616331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-01-07T17:42:49.616579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:42:49.616765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-07T17:42:49.616898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:42:49.617065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-07T17:42:49.625301Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:42:49.626769Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:42:49.627650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T17:42:49.627672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-07T17:42:49.627726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:42:49.627744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:42:49.627783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:42:49.627795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:42:49.628809Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:42:49.640005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-07T17:42:49.640324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:42:49.640608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-07T17:42:49.640757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:42:49.640963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:42:49.641110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:42:49.641246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-01-07T17:42:49.645218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:42:49.647926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:42:49.647973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:42:49.648055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:42:49.654113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-07T17:42:49.654158Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-07T17:42:49.654220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-07T17:42:49.654230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-07T17:42:49.655060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:42:49.655085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:42:49.655316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2026-01-07T17:42:49.655334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-01-07T17:42:49.655458Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:42:49.673240Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-01-07T17:42:49.673283Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-07T17:42:49.676606Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:42:49.676648Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestInvalidRetentionCombinations [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DataStreams::TestListShards1Shard >> DataStreams::TestPutRecords >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2026-01-07T17:42:37.834306Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676502658956361:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:37.834356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:38.576472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:38.683377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:38.683518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:38.703443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:38.769520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:38.913843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:38.938023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:39.059552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:39.059595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:39.059605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:39.059728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:39.307641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:39.422364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:39.687766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:39.709863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2026-01-07T17:42:40.406155Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676515543860445:3553] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:43.773959Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.012812s 2026-01-07T17:42:43.891376Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676529421254977:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:43.891669Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:44.009183Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:44.215399Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:44.215496Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:44.233255Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.237784Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:44.241709Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:44.457720Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:44.457743Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:44.457750Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:44.457826Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:44.645025Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:44.893355Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:45.182289Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:45.310479Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:45.526156Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:48.889545Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676529421254977:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:48.889770Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:53.772702Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676572415795247:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:53.773444Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:53.861383Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:54.022632Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:54.040706Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:54.040813Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:54.047970Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:54.089525Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:54.405333Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:54.405363Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:54.405372Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:54.405486Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:54.785074Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:54.981187Z node 7 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:55.004139Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:55.142532Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:55.517839Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2026-01-07T17:38:21.233985Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675406636480747:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:38:21.234430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:38:21.521189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:38:21.550750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:38:21.550866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:38:21.568457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:38:21.674381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:38:21.707303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:38:21.879144Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592675406636480886:2118] Handle TEvNavigate describe path dc-1 2026-01-07T17:38:21.879227Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592675406636481379:2442] HANDLE EvNavigateScheme dc-1 2026-01-07T17:38:21.879413Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592675406636480893:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:38:21.879595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592675406636481167:2300][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592675406636480893:2121], cookie# 1 2026-01-07T17:38:21.881410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675406636481216:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481213:2300], cookie# 1 2026-01-07T17:38:21.881463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675406636481217:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481214:2300], cookie# 1 2026-01-07T17:38:21.881478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592675406636481218:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481215:2300], cookie# 1 2026-01-07T17:38:21.881550Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675406636480574:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481216:2300], cookie# 1 2026-01-07T17:38:21.881582Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675406636480577:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481217:2300], cookie# 1 2026-01-07T17:38:21.881603Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592675406636480580:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592675406636481218:2300], cookie# 1 2026-01-07T17:38:21.881653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675406636481216:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636480574:2050], cookie# 1 2026-01-07T17:38:21.881672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675406636481217:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636480577:2053], cookie# 1 2026-01-07T17:38:21.881687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592675406636481218:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636480580:2056], cookie# 1 2026-01-07T17:38:21.881728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675406636481167:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636481213:2300], cookie# 1 2026-01-07T17:38:21.881756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592675406636481167:2300][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:38:21.881773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675406636481167:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636481214:2300], cookie# 1 2026-01-07T17:38:21.881799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7592675406636481167:2300][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:38:21.881837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592675406636481167:2300][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592675406636481215:2300], cookie# 1 2026-01-07T17:38:21.881865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592675406636481167:2300][/dc-1] Sync cookie mismatch: sender# [1:7592675406636481215:2300], cookie# 1, current cookie# 0 2026-01-07T17:38:21.881927Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592675406636480893:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:38:21.887949Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592675406636480893:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592675406636481167:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:38:21.888110Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592675406636480893:2121], cacheItem# { Subscriber: { Subscriber: [1:7592675406636481167:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:38:21.891759Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592675406636481380:2443], recipient# [1:7592675406636481379:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:38:21.891861Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592675406636481379:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:38:21.940409Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592675406636481379:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:38:21.944112Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592675406636481379:2442] Handle 
TEvDescribeSchemeResult Forward to# [1:7592675406636481378:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:38:21.945869Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592675406636480886:2118] Handle TEvProposeTransaction 2026-01-07T17:38:21.945897Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592675406636480886:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:38:21.945946Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592675406636480886:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592675406636481386:2448] 2026-01-07T17:38:22.062251Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592675406636481386:2448] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:38:22.062318Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592675406636481386:2448] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatab ... 
054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [6:7592676002877983626:2110] 2026-01-07T17:42:50.928095Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7592675998885529118:2054] Unsubscribe: subscriber# [6:7592676002877983626:2110], path# /dc-1/USER_0 2026-01-07T17:42:50.928122Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7592675998885529121:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [6:7592676002877983627:2110] 2026-01-07T17:42:50.928137Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7592675998885529121:2057] Unsubscribe: subscriber# [6:7592676002877983627:2110], path# /dc-1/USER_0 2026-01-07T17:42:50.929699Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:42:51.271243Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7592676000497990419:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.271412Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7592676000497990419:2109], cacheItem# { Subscriber: { Subscriber: [7:7592676013382892660:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:51.271514Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592676563138707141:2592], recipient# [7:7592676563138707140:2598], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.704120Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7592676000497990419:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.704265Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7592676000497990419:2109], cacheItem# { Subscriber: { Subscriber: [7:7592676004792958053:2313] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:51.704356Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592676563138707143:2593], recipient# [7:7592676563138707142:2599], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.722927Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7592676002877983609:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.723069Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592676002877983609:2105], cacheItem# { Subscriber: { Subscriber: [6:7592676007172951323:2348] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:51.723173Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592676565518701224:3308], recipient# [6:7592676565518701223:2710], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.760974Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7592676002877983609:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.761123Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592676002877983609:2105], cacheItem# { Subscriber: { Subscriber: [6:7592676007172951323:2348] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:51.761245Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592676565518701226:3309], recipient# [6:7592676565518701225:2711], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.913895Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7592676002877983609:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:51.914107Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592676002877983609:2105], cacheItem# { Subscriber: { Subscriber: [6:7592676015762885980:2385] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:51.914218Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592676565518701228:3310], recipient# [6:7592676565518701227:2712], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:52.725544Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7592676002877983609:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:42:52.725743Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592676002877983609:2105], cacheItem# { Subscriber: { 
Subscriber: [6:7592676007172951323:2348] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:42:52.725871Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592676569813668526:3311], recipient# [6:7592676569813668525:2713], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3601, MsgBus: 1049 2026-01-07T17:42:49.216421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676554290095172:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:49.216710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:49.569184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:49.579107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:49.579236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:49.617977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:49.709344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:49.723027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676554290094947:2081] 1767807769170551 != 1767807769170554 2026-01-07T17:42:49.933097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:49.981614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:49.981648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:49.981658Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:49.981748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:50.194392Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:50.799228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:50.806010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:50.882720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:51.105932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:51.369077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:51.483179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:53.607628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676571469966013:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:53.607729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:53.608267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676571469966023:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:53.608333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:53.921182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:53.955717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.013431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.054031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.094060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.175104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.206124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676554290095172:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:54.209122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:54.222149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.286294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.417185Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676575764934194:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.417306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.421374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676575764934199:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.421455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676575764934200:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.421522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.427751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:54.487604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676575764934203:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:54.576614Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676575764934254:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TConsoleConfigTests::TestCheckConfigUpdates >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope |89.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2026-01-07T17:42:42.753160Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676525241406776:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:42.753263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:43.085802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:43.170588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:43.170736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:43.212383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:43.261839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.351083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:43.491128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:43.491163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:43.491176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:43.491264Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:43.789051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:43.848000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:43.950703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:44.261865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:50.115497Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.006480s 2026-01-07T17:42:50.408131Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:50.408258Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:50.605082Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:50.799426Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:50.841824Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:50.841914Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:50.872356Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:50.985102Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:51.095691Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:51.095712Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:51.095722Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:51.095801Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:51.370649Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:51.440210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:51.595850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:51.840802Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:52.103353Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:52.143560Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7592676570223540987:3183], for# user2@builtin, access# DescribeSchema 2026-01-07T17:42:52.157550Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7592676570223540990:3184], for# user2@builtin, access# DescribeSchema 2026-01-07T17:42:52.168678Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:56.324542Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006465s 2026-01-07T17:42:56.345063Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676586599534516:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:56.353746Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:56.386943Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:56.552592Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:56.560812Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:56.560895Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:56.567699Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:56.617102Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:42:56.742607Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:56.742627Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:56.742633Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:56.742695Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:56.984378Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:57.048705Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:57.307122Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080 2026-01-07T17:42:57.373340Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup;
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> KqpQueryPerf::KvRead+QueryService [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> KqpQueryPerf::KvRead-QueryService >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> TConsoleTxProcessorTests::TestTxProcessorSingle >> KqpQueryPerf::Insert+QueryService-UseSink >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> TConsoleConfigTests::TestManageValidators [GOOD] >> BsControllerConfig::OverlayMap >> TConsoleConfigTests::TestDryRun >> DataStreams::ListStreamsValidation [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8492, MsgBus: 22313 2026-01-07T17:42:54.811721Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676575211555900:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:54.811923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:54.856752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:55.313435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:55.313562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:55.316355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:55.448272Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:55.461169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676575211555645:2081] 1767807774727755 != 1767807774727758 2026-01-07T17:42:55.461259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:55.719554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:55.744461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-07T17:42:55.744472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:55.744479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:55.744555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:55.805729Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:56.496783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:56.507007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:42:56.580249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:56.746509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:56.915339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.001548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:58.794352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676592391426719:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:58.794471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:58.794753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676592391426729:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:58.794794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.163170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.216288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.265399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.307436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.356263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.399804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.480347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.556681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.639777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676596686394901:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.639886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.640259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676596686394906:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.640310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676596686394907:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.640342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.644886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:59.660971Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676596686394910:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:59.754405Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676596686394961:3787] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:59.811695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676575211555900:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:59.811747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> TConsoleConfigTests::TestDryRun [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [GOOD] >> BsControllerConfig::OverlayMap [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> DataStreams::TestPutRecords [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> DataStreams::TestListShards1Shard [GOOD] >> BsControllerConfig::ManyPDisksRestarts >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> BsControllerConfig::OverlayMapCrossReferences >> TConsoleConfigTests::TestAffectedConfigs >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query >> DataStreams::TestReservedConsumersMetering >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] Test command err: 2026-01-07T17:42:31.778104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:31.778188Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:31.980372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:33.203354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:33.203418Z node 2 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.278004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.276453Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:34.276598Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.365703Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.566910Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:35.566971Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:35.645457Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.876869Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:36.876967Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.990389Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:38.316270Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:38.316343Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:38.421173Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:39.656806Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:39.656874Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:39.759847Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:44.166272Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:42:44.166345Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.278359Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:48.837971Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:48.838045Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:49.058366Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:50.916215Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:50.916285Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:51.106494Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:52.021631Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 11 2026-01-07T17:42:52.022351Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 11 2026-01-07T17:42:52.022591Z node 11 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [11:465:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-07T17:42:52.022860Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 11 2026-01-07T17:42:53.280596Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:53.280674Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:53.389355Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:55.401550Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:55.401654Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:55.638316Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:57.589340Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:42:57.589433Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:57.738103Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:59.707845Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:59.707939Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:59.852846Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:01.435887Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:01.435996Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:01.561165Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:02.803294Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:02.803407Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:02.911832Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:05.017224Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:05.017298Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:05.105569Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpQueryPerf::KvRead-QueryService [GOOD] >> BsControllerConfig::SoleCommandRollback >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> 
TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink >> TConsoleTxProcessorTests::TestTxProcessorTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 11267, MsgBus: 22657 2026-01-07T17:42:50.149690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676558639039707:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:50.149939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:50.194954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:50.621672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:50.628607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:50.628733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:50.678043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:50.862600Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676558639039582:2081] 1767807770095236 != 1767807770095239 2026-01-07T17:42:50.889664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:51.023719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:51.123497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:51.123517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:51.123527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:51.123611Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:51.157353Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:51.973019Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:51.980163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:52.077312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.228445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.380682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.459510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.771231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676575818910637:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.771354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.772053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676575818910647:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:54.772091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.120988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.144952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676558639039707:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:55.145012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:55.165868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.212643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.269276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.323629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.377301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.422004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.481995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:55.580065Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676580113878815:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.580211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.580730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676580113878820:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.580769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676580113878821:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.580914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:55.585695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:55.606139Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676580113878824:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:55.703146Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676580113878876:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64123, MsgBus: 20234 2026-01-07T17:42:59.704100Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676600390898353:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:59.704154Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:59.799554Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:59.842840Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:59.853285Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:59.853374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:59.857461Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:59.914998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:59.915029Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:59.915037Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:59.915113Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:00.098476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:00.430932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:00.440837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.499049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.635054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.717432Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:00.735653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.057183Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676617570769374:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.057255Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.057611Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676617570769384:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.057670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.119823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.143686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.170002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.197693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.229060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.260870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.292158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.341087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.445435Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676617570770253:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.445533Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.445610Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676617570770258:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.445664Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676617570770260:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.445846Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.449890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:03.458665Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676617570770262:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:03.561131Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676617570770313:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:04.704356Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592676600390898353:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:04.704426Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> BsControllerConfig::SoleCommandRollback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8963, MsgBus: 63885 2026-01-07T17:42:53.367737Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676572315740495:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:53.368902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:53.410528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:53.714172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:53.723683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:53.723794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:53.725049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676572315740334:2081] 1767807773342652 != 1767807773342655 2026-01-07T17:42:53.732983Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:53.745556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:53.865651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:53.865678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:53.865690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:53.865765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:54.014250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:54.369863Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:54.406129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:54.413722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:54.490936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.631713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.818840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:54.926164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:56.729309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676585200644097:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:56.729419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:56.729879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676585200644107:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:56.729927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.025460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.061193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.095839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.138201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.178216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.233757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.277905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.341156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.447250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676589495612282:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.447346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.447551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676589495612288:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.447596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.447612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676589495612287:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.451368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:57.463072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676589495612291:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:42:57.553688Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676589495612342:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:58.365004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676572315740495:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:58.365068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64731, MsgBus: 2891 2026-01-07T17:43:00.442269Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676602377879294:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:00.442346Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:00.455601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:00.528371Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:00.530302Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676602377879266:2081] 1767807780441268 != 1767807780441271 2026-01-07T17:43:00.561870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:00.561954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:00.564551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:00.596666Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:00.596690Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:00.596698Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:00.596773Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:00.671450Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:00.998606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:43:01.006314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:43:01.021148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.081999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.219381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.301550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.448312Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:03.689938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676615262783048:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.690034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.690334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676615262783058:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.690388Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:03.746612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.776257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.800553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.828716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.857581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.893945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.929860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:03.977977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.059302Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676619557751219:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:04.059407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:04.059495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676619557751224:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:04.059663Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676619557751226:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:04.059713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:04.063141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:04.073944Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676619557751228:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:43:04.148116Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676619557751279:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:05.442423Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592676602377879294:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:05.442486Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2026-01-07T17:42:31.820642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:31.820709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:31.997573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:33.081360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:33.081420Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.166739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.157485Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:34.157550Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.238970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.451754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:35.451828Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:35.533252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.549481Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 
2026-01-07T17:42:36.549559Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.641429Z node 4 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-07T17:42:37.264519Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:37.264619Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:37.363262Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:38.504239Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:42:38.504328Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:38.589325Z node 5 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[5:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-07T17:42:39.143536Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:39.143622Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:39.256194Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:43.126857Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:43.126921Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.238423Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:47.367010Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:47.367093Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:47.479667Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:49.211380Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:49.211474Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:49.324959Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:51.112694Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:51.112798Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:51.342090Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:53.096298Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:53.096393Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:53.226981Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:54.963405Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:54.963492Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:55.122412Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:56.856354Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:56.856437Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:56.970068Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:58.701430Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:58.701521Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:58.798583Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:00.148805Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:00.148905Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:43:00.278456Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:01.479941Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:01.480034Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:01.595177Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:02.842669Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:02.842760Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:02.986733Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:04.199249Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:04.199337Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:04.300166Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:05.755227Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:05.755319Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:05.847860Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:06.892332Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:06.892426Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:06.984511Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> TReplicaTest::Handshake >> BsControllerConfig::UnsupportedCommandError >> KqpQueryPerf::IndexReplace-QueryService+UseSink 
|89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> BsControllerConfig::UnsupportedCommandError [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1207, MsgBus: 5307 2026-01-07T17:42:47.514476Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676547027917567:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:47.514573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:47.871321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:47.871447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:47.961506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:47.983906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:48.002406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:48.009048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676547027917347:2081] 1767807767500241 != 1767807767500244 2026-01-07T17:42:48.168714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:48.193534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:48.193564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:48.193582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:48.193713Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:48.513794Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:48.860358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:48.873956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:42:48.939791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:49.160203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:49.395516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:49.468042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:51.419197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676564207788415:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:51.419312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:51.420022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676564207788425:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:51.420084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:51.946085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.009623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.045313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.082715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.133539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.184227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.235165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.324303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:52.439198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676568502756595:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:52.439310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:52.440044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676568502756601:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:52.440098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676568502756600:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:52.440135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:52.445323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:52.464280Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676568502756604:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:42:52.515918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676547027917567:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:52.516000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:52.543331Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676568502756655:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:42:54.844876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ification cookie mismatch for subscription [2:7592676595789339116:2081] 1767807778860206 != 1767807778860209 2026-01-07T17:42:59.033397Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:59.033497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:59.039229Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:59.208175Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:59.245519Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:59.245547Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:59.245577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:59.245667Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:59.809781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:59.817192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:42:59.827804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.906594Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:59.913355Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.062760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.125033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.446398Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676612969210168:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.446504Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.446776Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676612969210177:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.446845Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.516290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.544436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.574084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.602938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.631433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.680168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.772993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.845128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:02.953879Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676612969211056:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.953975Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.954131Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676612969211061:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.954173Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676612969211062:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.954270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:02.958811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:02.974475Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676612969211065:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:43:03.059626Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676617264178412:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:03.860756Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592676595789339139:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:03.860825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:43:04.633876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.667336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.701419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24525, MsgBus: 27830 2026-01-07T17:42:57.196250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676589490555212:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:57.196314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:57.491510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:57.500016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:57.500135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:57.588737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:57.594229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:57.601045Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7592676589490555185:2081] 1767807777193468 != 1767807777193471 2026-01-07T17:42:57.664862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:57.731857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:57.731877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:57.731883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:57.736735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:58.221100Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:58.536735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:58.569453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:42:58.684378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:58.828844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:58.991877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.073440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:00.910341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676602375458951:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:00.910498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:00.911048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676602375458961:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:00.911091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.251846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.281001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.339334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.376358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.402999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.432992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.467059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.510504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:01.584475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676606670427133:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.584563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.584761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676606670427138:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.584829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676606670427139:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.584856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:01.588731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:01.600631Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676606670427142:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:01.689023Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676606670427193:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:02.203591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676589490555212:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:02.203703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21368, MsgBus: 24284 2026-01-07T17:43:04.225532Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676619521429245:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:04.225724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:04.240766Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:04.297875Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:04.299384Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676619521429141:2081] 1767807784219712 != 1767807784219715 2026-01-07T17:43:04.312891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:04.313012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:04.333079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:04.375783Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:04.375807Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:04.375815Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:04.375888Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:04.418116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:04.712018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:43:04.720614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.783170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.886320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.937961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:05.229745Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:07.034170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676632406332893:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.034305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.034602Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676632406332903:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.034671Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.099704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.124459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.150834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.180551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.204818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.231705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.257714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.292406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.344422Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676632406333771:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.344494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676632406333776:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.344506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.344633Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676632406333779:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.344675Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.347039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:07.375501Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676632406333778:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:07.478264Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676632406333831:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19085, MsgBus: 5691 2026-01-07T17:43:03.979179Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676613758285997:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:03.979355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:04.249095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:04.284464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:04.284610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:04.357856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676613758285952:2081] 1767807783977229 != 1767807783977232 2026-01-07T17:43:04.364987Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:04.370158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:04.441981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:04.442005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:04.442009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:04.442077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:04.522150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:04.794392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:04.852046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:04.977581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:05.029842Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:05.090510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:05.148189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.724645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676626643189712:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:06.724760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:06.727003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676626643189722:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:06.727087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.073771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.100100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.125132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.150569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.176683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.230816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.257336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.294006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.350502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676630938157888:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.350582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.350679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676630938157893:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.350725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676630938157895:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.350760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.353389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:07.362168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676630938157897:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:07.465200Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676630938157948:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:08.979212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676613758285997:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:08.979331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2026-01-07T17:42:42.478623Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676526846471068:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:42.478854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:42.727836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:42.757318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:42.757445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:42.762285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:42.903281Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.115245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:43.159199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:43.159223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:43.159229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:43.159313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:43.482412Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:43.492257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:43.583309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:43.754391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:43.786105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-07T17:42:47.191973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:47.481187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676526846471068:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:47.481250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:47.536641Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7592676548321309398:2354] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:42:47.927405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:48.154510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:48.252009Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-07T17:42:48.252042Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-07T17:42:48.252053Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-07T17:42:48.252080Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-07T17:42:48.252102Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-01-07T17:42:48.252114Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-07T17:42:48.252176Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-07T17:42:48.252194Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-07T17:42:48.252221Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-07T17:42:48.252235Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-07T17:42:48.252245Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-07T17:42:48.252257Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-07T17:42:48.265269Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-07T17:42:48.277125Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-07T17:42:48.277154Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-07T17:42:48.277168Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-07T17:42:50.674106Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676558354994753:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:50.674169Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:50.789102Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:50.973482Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:51.083756Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:51.106808Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:51.106895Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:51.113156Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:51.200468Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005041s 2026-01-07T17:42:51.360744Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:51.360766Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:51.360773Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:51.360853Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:51.381074Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:51.661548Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:51.729092Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:51.790429Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:52.167467Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:52.572604Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:52.705392Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:52.860664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:55.770397Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676580401581065:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:55.770641Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:55.921668Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:56.031677Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:56.031778Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:56.081740Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:56.086360Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:56.097482Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:56.201742Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007795s 2026-01-07T17:42:56.320313Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:56.320336Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:56.320344Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:56.320421Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:56.590633Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:56.649979Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:56.697083Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:56.807920Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:56.910114Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:57.176118Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7592676588991517765:3586] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:00.878770Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592676604309764029:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:00.881349Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:00.892667Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:00.991067Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:01.012740Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:01.012844Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:01.031565Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:01.054674Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:01.093729Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:01.093754Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:01.093763Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:01.093844Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:01.346140Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:01.414378Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:01.659813Z node 10 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TReplicaTest::Handshake [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TReplicaTest::DoubleUnsubscribe >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TConsoleTests::TestAlterUnknownTenant >> DataStreams::TestReservedConsumersMetering [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestListTenants >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> TPipeTrackerTest::TestShareTablet [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TReplicaTest::DoubleDelete [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3275, MsgBus: 22207 2026-01-07T17:43:05.211802Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676624067190882:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:05.212042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:05.372150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:05.383166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:05.383307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:05.385752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:05.477062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676624067190854:2081] 1767807785210562 != 1767807785210565 2026-01-07T17:43:05.483462Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:05.547914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:05.547950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:05.547957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:05.548083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:05.615265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:05.950141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:06.005198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.124031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.218586Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:06.244549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.309637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:07.760728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676632657127314:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.760854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.761244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676632657127324:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.761316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.980953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.007497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.034311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.061811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.086610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.114685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.142891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.180259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.247631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676636952095490:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.247684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.247694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676636952095495:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.247837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676636952095497:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.247888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.250940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:08.260711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676636952095498:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:08.317914Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676636952095550:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:10.212096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676624067190882:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:10.212171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22189, MsgBus: 11353 2026-01-07T17:43:10.698183Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676644593022306:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:10.698227Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:10.707905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:10.757354Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:10.758467Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676644593022279:2081] 1767807790697385 != 1767807790697388 2026-01-07T17:43:10.809453Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:10.809474Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:10.809483Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:10.809567Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:10.811586Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:10.811644Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:10.813248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:10.902472Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:11.098661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:43:11.108371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:11.153307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:11.247339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:11.293598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:11.717705Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:12.875064Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676653182958734:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:12.875149Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:12.875324Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676653182958744:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:12.875368Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:12.919894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:12.944200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:12.968438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.022671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.047380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.070679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.094321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.125613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.178118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676657477926908:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:13.178190Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676657477926913:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:13.178189Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:13.178320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676657477926915:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:13.178361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:13.180595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:13.187495Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676657477926916:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:43:13.289810Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676657477926968:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2026-01-07T17:43:13.054266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:43:13.054337Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:43:13.305147Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:43:13.305198Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:43:13.305334Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:43:13.305360Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:43:13.310096Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:43:13.310233Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-07T17:43:13.310291Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:43:13.310375Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-07T17:43:13.310433Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-07T17:43:13.310481Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-07T17:43:13.571729Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:43:13.571783Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:43:13.571911Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-07T17:43:13.571945Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 
2026-01-07T17:43:13.572000Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:43:13.572095Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:43:13.572128Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:43:13.572180Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:43:13.572291Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-07T17:43:13.572320Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:43:13.572346Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:43:13.572430Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-07T17:43:13.572470Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:43:13.572545Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-07T17:43:13.572570Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-07T17:43:15.826621Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.826668Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.826691Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.827112Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:43:15.827159Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.827185Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.828149Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005137s 2026-01-07T17:43:15.828567Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:43:15.828900Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:15.829007Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.829813Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.829827Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.829853Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.830137Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:43:15.830166Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.830187Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.830240Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006956s 2026-01-07T17:43:15.830590Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:15.831034Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:15.831124Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.831821Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.831834Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.831845Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.832172Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:43:15.832207Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.832228Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.832289Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.161127s 2026-01-07T17:43:15.832642Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:15.833063Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:15.833133Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.833755Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.833768Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.833778Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.834050Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:43:15.834080Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.834099Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.834162Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.189786s 2026-01-07T17:43:15.834531Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:15.834915Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:15.835000Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.835656Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.835676Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.835701Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.835937Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:15.836230Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:15.844248Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.844511Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-07T17:43:15.844534Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.844549Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.844587Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.259254s 2026-01-07T17:43:15.844895Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:43:15.846156Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.846185Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.846211Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:15.846536Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:15.846956Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:15.847170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.847540Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:43:15.948591Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:15.948814Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:43:15.948882Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:43:15.948948Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:43:15.949004Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:43:16.049353Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:43:16.049499Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:43:16.050602Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:16.050630Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:16.050671Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:16.051042Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:16.051467Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:16.051622Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:16.052065Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:43:16.152788Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:16.153071Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:43:16.153160Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:43:16.153202Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2026-01-07T17:43:16.153292Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-07T17:43:16.153422Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:43:16.153503Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:43:16.153650Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:43:16.153797Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] Leader for TabletID 9437184 is [3:172:2144] sender: [3:173:2058] recipient: [3:164:2140] Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] Leader for TabletID 9437185 is [4:188:2100] sender: [4:189:2049] recipient: [4:167:2097] Leader for TabletID 9437184 is [3:172:2144] sender: [3:216:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:220:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:188:2100] sender: [3:223:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:221:2049] recipient: [4:161:2096] Leader for TabletID 9437185 is [4:188:2100] sender: [4:226:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [4:228:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [3:258:2058] recipient: [3:15:2062] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> TBackupTests::BackupUuidColumn[Raw] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestAlterBorrowedStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:42:17.624484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:17.693236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:17.699928Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:17.700150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:17.700233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:17.955206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:18.034747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:18.034870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:18.068723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:18.172704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:18.731967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:18.733026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:18.733068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:18.733111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:18.733274Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:18.799086Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:19.329451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:21.675710Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:21.679690Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:21.681826Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:21.701033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:21.701109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:21.738016Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:21.739433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:21.905834Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:21.905973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:21.907471Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.908042Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.908464Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.909442Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.909650Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.910012Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.910122Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.910252Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.910488Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:21.925703Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.109002Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:22.133351Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:22.133466Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:22.163244Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:22.164363Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:22.164549Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:22.164635Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:22.164681Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:22.164730Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:22.164792Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:22.164834Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:22.165401Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:22.167235Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:22.169387Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:22.178822Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:22.178908Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:22.179010Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:22.186253Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:22.186363Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:22.257524Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:22.257833Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:22.260062Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:22.289009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:22.300955Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:22.301087Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:22.311236Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:22.499735Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:22.555272Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:22.813470Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:22.942143Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:22.942237Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:23.637410Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-07T17:43:01.115665Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-07T17:43:01.115700Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-07T17:43:01.115774Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-07T17:43:01.115842Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:43:01.115941Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:43:01.115999Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:43:01.116067Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:43:01.116124Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:43:01.116278Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:01.117785Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:43:01.118465Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5587:4792] Owner: [2:5586:4791]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:43:01.118529Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5587:4792] Owner: [2:5586:4791]. 
Column diff is empty, finishing 2026-01-07T17:43:01.118942Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:01.119013Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:01.120116Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:01.120212Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:01.123885Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:01.144189Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5596:4799] 2026-01-07T17:43:01.144319Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5548:4770], server id = [2:5596:4799], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:01.144520Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5596:4799], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:43:01.144727Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5597:4800] 2026-01-07T17:43:01.144842Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5597:4800], schemeshard id = 72075186224037897 2026-01-07T17:43:01.220840Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:01.221062Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2026-01-07T17:43:01.221606Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5536:4758], server id = [2:5540:4762], tablet id = 72075186224037900 2026-01-07T17:43:01.221662Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:01.222504Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5602:4805], server id = [2:5606:4809], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:01.222612Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5602:4805], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:01.222830Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5603:4806], server id = [2:5607:4810], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:01.222886Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5603:4806], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:01.224399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5604:4807], server id = [2:5609:4812], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:01.224469Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5604:4807], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:01.224846Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5605:4808], server id = [2:5608:4811], tablet id = 72075186224037902, status = OK 
2026-01-07T17:43:01.224917Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5605:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:01.225281Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:43:01.226708Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5602:4805], server id = [2:5606:4809], tablet id = 72075186224037899 2026-01-07T17:43:01.226742Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:01.227348Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:01.227660Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5605:4808], server id = [2:5608:4811], tablet id = 72075186224037902 2026-01-07T17:43:01.227690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:01.228538Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:01.228871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5604:4807], server id = [2:5609:4812], tablet id = 72075186224037901 2026-01-07T17:43:01.228903Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:01.229788Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:01.229832Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:01.230048Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:01.230218Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:01.230509Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5616:4818], ActorId: [2:5617:4819], Starting query actor #1 [2:5618:4820] 2026-01-07T17:43:01.230583Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5617:4819], ActorId: [2:5618:4820], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:01.233117Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5603:4806], server id = [2:5607:4810], tablet id = 72075186224037900 2026-01-07T17:43:01.233145Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:01.233848Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5617:4819], ActorId: [2:5618:4820], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MWMxNDIxMjctZmY1MjMzMDktNWQ0ZmFiYi0zNzY0OGUxMg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:01.329968Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5627:4829]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:01.330375Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:01.330445Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5627:4829], StatRequests.size() = 1 2026-01-07T17:43:01.455040Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5617:4819], ActorId: [2:5618:4820], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWMxNDIxMjctZmY1MjMzMDktNWQ0ZmFiYi0zNzY0OGUxMg==, TxId: 2026-01-07T17:43:01.455126Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5617:4819], ActorId: [2:5618:4820], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWMxNDIxMjctZmY1MjMzMDktNWQ0ZmFiYi0zNzY0OGUxMg==, TxId: 2026-01-07T17:43:01.455595Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5616:4818], ActorId: [2:5617:4819], Got response [2:5618:4820] SUCCESS 2026-01-07T17:43:01.456373Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:01.472891Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:01.472983Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:43:01.564252Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5653:4837]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:01.564671Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:01.564734Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:01.565087Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:01.565137Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:01.565188Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:01.568547Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TConsoleTests::TestListTenantsExtSubdomain >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> DataStreams::TestGetRecordsStreamWithMultipleShards |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:33.756344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:33.838999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:33.846837Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:33.847162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:33.847291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.255130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.349933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.350030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.384181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.471568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.239208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.240533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.240587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.240647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.240871Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:35.310226Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:35.955338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:39.522574Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:39.531254Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:39.536119Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:39.579625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:39.579775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.637126Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:39.640306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.033365Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.033493Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.035178Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.035877Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.036496Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.037528Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.037743Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.038095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.038212Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.038473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.038706Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.061578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.351462Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.384548Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:40.384715Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:40.431178Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:40.432634Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:40.432858Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:40.433616Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:40.433699Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:40.433757Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:40.433822Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:40.433883Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:40.434484Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:40.437152Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:40.439216Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:40.450540Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:40.450632Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:40.450727Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:40.458541Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.458673Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.484173Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:40.484663Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:40.489395Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:40.534411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.551146Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:40.551339Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:40.571018Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:40.766335Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:40.791166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:41.259313Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:41.367726Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:41.367823Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:42.253525Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... jcwNg==, TxId: 2026-01-07T17:43:14.250969Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5437:4692], ActorId: [2:5438:4693], Got response [2:5439:4694] SUCCESS 2026-01-07T17:43:14.251238Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:14.265160Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-07T17:43:14.265240Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-07T17:43:14.413281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:43:14.413364Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:43:14.467409Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5450:4705], schemeshard count = 1 2026-01-07T17:43:15.475156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:43:15.475223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:43:15.475255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:15.479768Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:15.507911Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:15.508387Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:15.508481Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:15.509695Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-07T17:43:15.545778Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:15.546043Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:43:15.546788Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5532:4750], server id = [2:5536:4754], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:15.547121Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5532:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:15.548304Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5533:4751], server id = [2:5537:4755], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:15.548357Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5533:4751], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:15.548594Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5534:4752], server id = [2:5538:4756], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:15.548624Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5534:4752], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:15.549223Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5535:4753], server id = [2:5539:4757], tablet id = 72075186224037902, status = OK 2026-01-07T17:43:15.549278Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5535:4753], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:15.553777Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:43:15.554391Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5532:4750], server id = [2:5536:4754], tablet id = 72075186224037899 2026-01-07T17:43:15.554442Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:15.554983Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:15.555553Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5533:4751], server id = [2:5537:4755], tablet id = 72075186224037900 2026-01-07T17:43:15.555584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:15.555976Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:15.556188Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5535:4753], server id = [2:5539:4757], tablet id = 72075186224037902 2026-01-07T17:43:15.556217Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:15.556512Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:15.556558Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:15.556704Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-07T17:43:15.556787Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:15.557248Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5534:4752], server id = [2:5538:4756], tablet id = 72075186224037901 2026-01-07T17:43:15.557275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:15.557680Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:15.593056Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:15.593258Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-07T17:43:15.593801Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5554:4768], server id = [2:5555:4769], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:15.593891Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5554:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:15.594889Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:15.594942Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:15.595052Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:15.595144Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:15.595488Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5557:4771], ActorId: [2:5558:4772], Starting query actor #1 [2:5559:4773] 2026-01-07T17:43:15.595554Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5558:4772], ActorId: [2:5559:4773], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:15.597317Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5554:4768], server id = [2:5555:4769], tablet id = 72075186224037900 2026-01-07T17:43:15.597347Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:15.597964Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5558:4772], ActorId: [2:5559:4773], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTJhZjU4ZDQtYjNlZDk1NTAtYjUyMmU2OTQtOWE3YmYwNmY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:15.675586Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5568:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:15.675842Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:15.675878Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5568:4782], StatRequests.size() = 1 2026-01-07T17:43:15.801198Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5558:4772], ActorId: [2:5559:4773], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTJhZjU4ZDQtYjNlZDk1NTAtYjUyMmU2OTQtOWE3YmYwNmY=, TxId: 2026-01-07T17:43:15.801250Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5558:4772], ActorId: [2:5559:4773], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTJhZjU4ZDQtYjNlZDk1NTAtYjUyMmU2OTQtOWE3YmYwNmY=, TxId: 2026-01-07T17:43:15.801544Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5557:4771], ActorId: [2:5558:4772], Got response [2:5559:4773] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:43:15.801881Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5581:4788]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:15.802179Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:15.802246Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:15.802448Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:15.803131Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:15.803178Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:15.803221Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:15.806334Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2026-01-07T17:42:43.782366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676531345144314:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:43.782851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:43.805461Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.019222s 2026-01-07T17:42:44.226450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:44.302472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:44.302590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:44.386138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:44.532782Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.581070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:44.685583Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011455s 
2026-01-07T17:42:44.689301Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.019189s 2026-01-07T17:42:44.761439Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010201s 2026-01-07T17:42:44.801063Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:44.938110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:44.938141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:44.938155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:44.938273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:45.326147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:45.459019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:45.729523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:46.363588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:46.732870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:50.796446Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676560457969198:2264];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:50.789043Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.011934s 2026-01-07T17:42:50.801511Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:50.805341Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:50.893680Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:51.089195Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:51.103544Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:51.103630Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:51.153631Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:51.154936Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:51.269106Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012841s 2026-01-07T17:42:51.293481Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.036040s 2026-01-07T17:42:51.376909Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:51.376951Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:51.376957Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:51.377028Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:51.449027Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:51.775582Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:51.797067Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:51.797360Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:42:51.864138Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:52.122227Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2026-01-07T17:42:53.361712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:54.667444Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:54.789118Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:54.942983Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:55.270083Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:57.022796Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676588351881713:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:57.022853Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:57.045863Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:57.144398Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:57.177935Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:57.178068Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:57.202551Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:57.259201Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:57.319097Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:57.319117Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:57.319123Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:57.319199Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:57.558272Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:57.670630Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:57.927626Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:57.945955Z node 7 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-07T17:42:58.030244Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> S3SettingsConversion::StyleDeduction [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:34.106374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:34.209469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:34.216372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:34.216740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:34.216875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.607090Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.741955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.742102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.793867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.907054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.620163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.620685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.620730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.620772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.621356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:35.683177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:36.314114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:39.988021Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:40.013410Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:40.017744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:40.106617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:40.106773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.160624Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:40.169538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.496398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.496543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.498209Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.499185Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.499866Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.501032Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.501662Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.501897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.502194Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.502449Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.502632Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.528688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.900606Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:41.001376Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:41.001598Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:41.042417Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:41.043733Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:41.043978Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:41.044042Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:41.044105Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:41.044175Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:41.044228Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:41.044278Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:41.045494Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:41.067936Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:41.068048Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:41.097394Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:41.098152Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:41.177180Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:41.178786Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:41.208564Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:41.208626Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:41.208714Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:41.216359Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:41.220431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:41.227264Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:41.227368Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:41.238582Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:41.460151Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:41.479752Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:41.987688Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:42.170472Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:42.170586Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:43.004448Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5451:4713], server id = [2:5452:4714], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:18.656578Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5452:4714] 2026-01-07T17:43:18.656704Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5452:4714], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:43:18.656756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:43:18.656884Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:43:18.656972Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5449:4711], StatRequests.size() = 1 2026-01-07T17:43:18.657035Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:43:18.757785Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5439:4701], ActorId: [2:5440:4702], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmU5YTA5OS1jZDJmYTQ5NS04OGI2NzkwNi1mY2Q5M2QxYg==, TxId: 2026-01-07T17:43:18.757848Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5439:4701], ActorId: [2:5440:4702], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmU5YTA5OS1jZDJmYTQ5NS04OGI2NzkwNi1mY2Q5M2QxYg==, TxId: 2026-01-07T17:43:18.758141Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5438:4700], ActorId: [2:5439:4701], Got response [2:5440:4702] SUCCESS 2026-01-07T17:43:18.758409Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:18.771847Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-07T17:43:18.771896Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] 
TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-07T17:43:18.850285Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:43:18.850387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:43:18.904056Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5451:4713], schemeshard count = 1 2026-01-07T17:43:19.924579Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:43:19.924638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:43:19.924673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:19.927865Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:19.955207Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:19.955720Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:19.955796Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:19.956707Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:19.980456Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:19.980664Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:43:19.981537Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5533:4758], server id = [2:5537:4762], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:19.981790Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5533:4758], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:19.982052Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5534:4759], server id = [2:5538:4763], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:19.982091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5534:4759], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:19.982646Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5535:4760], server id = [2:5539:4764], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:19.982687Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5535:4760], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:19.983140Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5536:4761], server id = [2:5540:4765], tablet id = 72075186224037902, status = OK 2026-01-07T17:43:19.983175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5536:4761], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:19.986279Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received 
TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:43:19.987044Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5533:4758], server id = [2:5537:4762], tablet id = 72075186224037899 2026-01-07T17:43:19.987079Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:19.987499Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:19.987693Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5534:4759], server id = [2:5538:4763], tablet id = 72075186224037900 2026-01-07T17:43:19.987711Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:19.988327Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:19.988610Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5536:4761], server id = [2:5540:4765], tablet id = 72075186224037902 2026-01-07T17:43:19.988632Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:19.988905Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:19.988963Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:19.989071Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:19.989171Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:19.989451Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5553:4774], ActorId: [2:5554:4775], Starting query actor #1 [2:5555:4776] 2026-01-07T17:43:19.989491Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5554:4775], ActorId: [2:5555:4776], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:19.990984Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5535:4760], server id = [2:5539:4764], tablet id = 72075186224037901 2026-01-07T17:43:19.991005Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:19.991534Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5554:4775], ActorId: [2:5555:4776], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YmY5M2I5M2UtN2YzODljZTktODQxZTUxYzktY2QyYjc3MmM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:20.091406Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5564:4785]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:20.091720Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:20.091777Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5564:4785], StatRequests.size() = 1 2026-01-07T17:43:20.192605Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5554:4775], ActorId: [2:5555:4776], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmY5M2I5M2UtN2YzODljZTktODQxZTUxYzktY2QyYjc3MmM=, TxId: 2026-01-07T17:43:20.192658Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5554:4775], ActorId: [2:5555:4776], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmY5M2I5M2UtN2YzODljZTktODQxZTUxYzktY2QyYjc3MmM=, TxId: 2026-01-07T17:43:20.192898Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5553:4774], ActorId: [2:5554:4775], Got response [2:5555:4776] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:43:20.193265Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5577:4791]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:20.193425Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:20.193812Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:20.193855Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:20.194363Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:20.194415Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:20.194461Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:20.197072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TConsoleTests::TestMergeConfig |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2026-01-07T17:42:36.755420Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676498272821107:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:36.755469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:37.161138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:37.188020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:37.188095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:37.196776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:37.332957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:37.377396Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007127s 2026-01-07T17:42:37.400685Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:37.573769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:37.573795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:37.573802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:37.573880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:37.794552Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:38.151603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:38.338473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:38.601937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:38.874530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:38.950226Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:42:38.950263Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:42:38.950278Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:42:38.950308Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:42:38.962414Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-07T17:42:38.962481Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-07T17:42:38.962505Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 
THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-07T17:42:38.962528Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-07T17:42:43.099515Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676528196873906:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:43.099569Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:43.139078Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:43.288091Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.318677Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:43.318789Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:43.328597Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:43.339290Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:43.396909Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006490s 2026-01-07T17:42:43.540962Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:43.540985Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:43.540991Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:43.541067Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:43.830850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:43.934084Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:44.109655Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:44.263143Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:44.633220Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:44.746778Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:44.788435Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2026-01-07T17:42:44.788471Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2026-01-07T17:42:44.788482Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2026-01-07T17:42:44.788498Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2026-01-07T17:42:44.804520Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-07T17:42:44.804645Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-07T17:42:44.804687Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-07T17:42:44.804751Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-07T17:42:48.657035Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007369s 2026-01-07T17:42:48.668154Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676550372281222:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:48.677344Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:48.754638Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:48.780881Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.010481s 2026-01-07T17:42:48.830010Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:49.005277Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:42:49.008526Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:49.008617Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:49.037027Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:49.038376Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:49.153207Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009660s 2026-01-07T17:42:49.131309Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006179s 2026-01-07T17:42:49.169120Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007765s 2026-01-07T17:42:49.282909Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:49.282933Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:49.282940Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:49.283016Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:49.399159Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:49.546492Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:49.613263Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:49.686614Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:49.886304Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:49.917133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T17:42:50.220172Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:50.343459Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:50.427648Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:50.459642Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-07T17:42:50.465344Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-07T17:42:50.465398Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-07T17:42:50.465449Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-07T17:42:50.471907Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-07T17:42:55.609230Z node 10 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.011564s 2026-01-07T17:42:55.664541Z node 10 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006298s 2026-01-07T17:42:55.778200Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592676580650617175:2093];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:55.778916Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:55.917099Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:56.205838Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:56.304831Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:56.379059Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:56.379161Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:56.384299Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:42:56.478046Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:56.478076Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:56.478084Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:56.478193Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:56.579603Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:56.816817Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:56.869084Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:56.886018Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:57.142086Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:43:00.705376Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7592676580650617175:2093];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:00.705463Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ColumnShardTiers::DSConfigsWithQueryServiceDdl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:21.877550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:21.938739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:21.943397Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:21.943615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:21.943694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:22.174821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:22.249364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:22.249455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:22.282975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.343390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:22.917318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:22.917879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:22.917908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:22.917935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:22.918042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:22.978835Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:23.492125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:26.148584Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:26.155097Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:26.158209Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:26.183463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:26.183589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.224626Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:26.227617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:26.416712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.417333Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.417961Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.418766Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.418899Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.419255Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.419354Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.419512Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.419761Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.581978Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:26.641995Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:26.642817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.668313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:26.853703Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:26.900053Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:26.900163Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:26.934458Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:26.934978Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:26.935166Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:26.935218Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:26.935262Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:26.935302Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:26.935351Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:26.935398Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:26.935778Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:27.054911Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:27.055046Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2199:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:27.074414Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2226:2630] 2026-01-07T17:42:27.075477Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2226:2630], schemeshard id = 72075186224037897 2026-01-07T17:42:27.289667Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2345:2694] 2026-01-07T17:42:27.290025Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:27.292445Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Describe result: PathErrorUnknown 2026-01-07T17:42:27.292507Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Creating table 2026-01-07T17:42:27.292713Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:27.296360Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2364:2701], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:27.300431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:27.309467Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:27.309592Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:27.324091Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:27.482220Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:27.911855Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:28.040316Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:28.040437Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2347:2696] Owner: [2:2346:2695]. Column diff is empty, finishing 2026-01-07T17:42:28.686372Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2026-01-07T17:43:06.840417Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-07T17:43:06.840451Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767807786759962 2026-01-07T17:43:06.840490Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-07T17:43:06.840518Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-07T17:43:06.840585Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-07T17:43:06.840638Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:43:06.840709Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:43:06.840769Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:43:06.840809Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:43:06.840867Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:43:06.841018Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:06.841926Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:43:06.842567Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:06.842641Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:06.842755Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5655:4803] Owner: [2:5654:4802]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:43:06.842804Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5655:4803] Owner: [2:5654:4802]. 
Column diff is empty, finishing 2026-01-07T17:43:06.844070Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:06.844136Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:06.844897Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:06.863697Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5664:4810] 2026-01-07T17:43:06.863813Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5620:4782], server id = [2:5664:4810], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:06.863957Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5664:4810], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:43:06.864111Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5665:4811] 2026-01-07T17:43:06.864191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5665:4811], schemeshard id = 72075186224037897 2026-01-07T17:43:06.901876Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:06.902106Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:43:06.902896Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5670:4816], server id = [2:5674:4820], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:06.903196Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5670:4816], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:06.903457Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5671:4817], server id = [2:5675:4821], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:06.903500Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5671:4817], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:06.904422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5672:4818], server id = [2:5676:4822], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:06.904500Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5672:4818], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:06.906125Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5673:4819], server id = [2:5678:4824], tablet id = 72075186224037902, status = OK 2026-01-07T17:43:06.906202Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5673:4819], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:06.909848Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:43:06.910136Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5670:4816], server id = [2:5674:4820], tablet id = 72075186224037899 2026-01-07T17:43:06.910172Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:06.911431Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:06.911653Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5671:4817], server id = [2:5675:4821], tablet id = 72075186224037900 2026-01-07T17:43:06.911677Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:06.912161Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:06.912645Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5672:4818], server id = [2:5676:4822], tablet id = 72075186224037901 2026-01-07T17:43:06.912682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:06.913134Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:06.913178Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:06.913394Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:06.913526Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:06.913758Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5690:4832], ActorId: [2:5691:4833], Starting query actor #1 [2:5692:4834] 2026-01-07T17:43:06.913811Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5691:4833], ActorId: [2:5692:4834], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:06.916610Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5673:4819], server id = [2:5678:4824], tablet id = 72075186224037902 2026-01-07T17:43:06.916644Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:06.917827Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5691:4833], ActorId: [2:5692:4834], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODZkZjRlN2UtZmRmN2Y4NzItM2RjODljM2MtMTI0ZGY2NDM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:07.016673Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5701:4843]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:07.016913Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:07.016992Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5701:4843], StatRequests.size() = 1 2026-01-07T17:43:07.161325Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5691:4833], ActorId: [2:5692:4834], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODZkZjRlN2UtZmRmN2Y4NzItM2RjODljM2MtMTI0ZGY2NDM=, TxId: 2026-01-07T17:43:07.161412Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5691:4833], ActorId: [2:5692:4834], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODZkZjRlN2UtZmRmN2Y4NzItM2RjODljM2MtMTI0ZGY2NDM=, TxId: 2026-01-07T17:43:07.161825Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5690:4832], ActorId: [2:5691:4833], Got response [2:5692:4834] SUCCESS 2026-01-07T17:43:07.162190Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:07.199603Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:07.199698Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:43:07.266405Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5717:4851]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:07.266839Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:07.266904Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:07.267188Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:07.267244Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:07.267321Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:07.271125Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:22.459213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:22.524689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:22.530501Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:22.530756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:22.530855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:22.800582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:22.876333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:22.876443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:22.910648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:22.990309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:23.601389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:23.602621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:23.602675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:23.602708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:23.603320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:23.667313Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:24.200395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:26.576351Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:26.584838Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:26.589665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:26.633160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:26.633298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.677509Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:26.685889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:26.887153Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:26.887285Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:26.888840Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.889730Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.890291Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.891231Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.891844Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.892102Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.892406Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.892645Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.892805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:26.909058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:27.134050Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:27.199628Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:27.199774Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:27.294284Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:27.295604Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:27.295804Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:27.295909Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:27.295963Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:27.296012Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:27.296112Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:27.296189Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:27.297101Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:27.311209Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:27.311310Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:27.335151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:27.335786Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:27.384913Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:27.386153Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:27.397787Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:27.397846Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:27.397911Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:27.408134Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:27.412129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:27.419770Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:27.419955Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:27.433640Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:27.636436Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:27.714225Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:27.985405Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:28.149763Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:28.149853Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:28.957398Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ner id: 72075186224037897 2026-01-07T17:43:04.461442Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-07T17:43:04.461482Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767807784346588 2026-01-07T17:43:04.461542Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-07T17:43:04.461584Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-07T17:43:04.461697Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-07T17:43:04.461780Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:43:04.461878Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:43:04.461946Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:43:04.462018Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:43:04.462080Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:43:04.462205Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:04.463222Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:43:04.463992Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:04.464055Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:04.464169Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5565:4767] Owner: [2:5564:4766]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:43:04.464230Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5565:4767] Owner: [2:5564:4766]. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2026-01-07T17:43:04.488386Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5574:4774] 2026-01-07T17:43:04.488752Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5574:4774], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:43:04.488890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5530:4746], server id = [2:5574:4774], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:04.489098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5575:4775] 2026-01-07T17:43:04.489242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5575:4775], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2026-01-07T17:43:04.605857Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:04.605973Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:04.607586Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:04.622799Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:04.622988Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:43:04.624470Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5585:4782], server id = [2:5589:4786], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:04.624959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5585:4782], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:04.625399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5586:4783], server id = [2:5590:4787], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:04.625475Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5586:4783], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:04.625734Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5587:4784], server id = [2:5591:4788], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:04.625800Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5587:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:04.626969Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5588:4785], server id = [2:5592:4789], tablet id = 72075186224037902, status = OK 2026-01-07T17:43:04.627031Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5588:4785], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:04.634202Z node 2 
:STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:43:04.634782Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5585:4782], server id = [2:5589:4786], tablet id = 72075186224037899 2026-01-07T17:43:04.634837Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:04.636847Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:04.637381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5586:4783], server id = [2:5590:4787], tablet id = 72075186224037900 2026-01-07T17:43:04.637424Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:04.637869Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:04.638137Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:04.638189Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:04.638537Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:04.638723Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:04.639007Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5605:4798], ActorId: [2:5606:4799], Starting query actor #1 [2:5607:4800] 2026-01-07T17:43:04.639085Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5606:4799], ActorId: [2:5607:4800], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:04.642449Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5587:4784], server id = [2:5591:4788], tablet id = 72075186224037901 2026-01-07T17:43:04.642493Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:04.643108Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5588:4785], server id = [2:5592:4789], tablet id = 72075186224037902 2026-01-07T17:43:04.643142Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:04.643584Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5606:4799], ActorId: [2:5607:4800], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2ZkNTA0YzctNGViZmQ2ZDctNDhhYWM0NzktZTc4YjY5NTE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:04.767210Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5616:4809]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:04.767585Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:04.767647Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5616:4809], StatRequests.size() = 1 2026-01-07T17:43:04.957792Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5606:4799], ActorId: [2:5607:4800], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2ZkNTA0YzctNGViZmQ2ZDctNDhhYWM0NzktZTc4YjY5NTE=, TxId: 2026-01-07T17:43:04.957877Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5606:4799], ActorId: [2:5607:4800], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2ZkNTA0YzctNGViZmQ2ZDctNDhhYWM0NzktZTc4YjY5NTE=, TxId: 2026-01-07T17:43:04.958251Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5605:4798], ActorId: [2:5606:4799], Got response [2:5607:4800] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:43:04.958693Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5629:4815]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:04.959008Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:04.959398Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:04.959454Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:04.960103Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:04.960149Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:04.960202Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:04.963079Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:18.586586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:18.586667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:18.586721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:18.586751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:18.586789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:18.586828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:18.586876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:18.586939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:43:18.587608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:18.587836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:18.655152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:18.655208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:18.665986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:18.666250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:18.666389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:18.671481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:18.671774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:18.672317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:18.672519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:18.674707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:18.674856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:18.675816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:18.675865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:18.675978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:18.676015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:18.676052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:18.676197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:18.789220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:18.792959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ultipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:19.800225Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:888:2842], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:9676 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE9820DC-C886-4BDB-B40C-4726CABFF9C3 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2026-01-07T17:43:19.807114Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:887:2844], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2026-01-07T17:43:19.807489Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:886:2841] 2026-01-07T17:43:19.807630Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:887:2844], sender# [1:886:2841], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-07T17:43:19.808624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:19.808738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:19.808783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:19.808826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:19.808888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:43:19.808994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:9676 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BCCEA2A4-A9F9-431B-A851-02EB33457CD2 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2026-01-07T17:43:19.812002Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:887:2844], result# PutObjectResult { ETag: 
6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-07T17:43:19.812057Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:887:2844], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:19.812764Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:886:2841], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:19.820712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:43:19.844591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 4294970004 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.844668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:19.844851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 4294970004 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.844990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 726 RawX2: 4294970004 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.845070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:19.845235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:19.845753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 731 RawX2: 4294970006 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.845793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:43:19.845948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 731 RawX2: 4294970006 } Origin: 
72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.846040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 731 RawX2: 4294970006 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:19.846107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:19.846147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.846194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:19.846236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:43:19.846265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:19.846394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:19.849647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.850192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.850631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.850701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:19.850826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:19.850872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.850918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:19.850952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.851003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2026-01-07T17:43:19.851083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:779:2745] message: TxId: 102 2026-01-07T17:43:19.851136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.851193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:19.851233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:19.851374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:19.853384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:19.853453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:864:2822] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:19.118904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:19.118967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:19.119005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:19.119033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:19.119073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:19.119109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:19.119155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:19.119202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:19.119810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:19.120033Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:19.177505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:19.177561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:19.187460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:19.187697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:19.187838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:19.192164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:19.192401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:19.192843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:19.193044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:19.194677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:19.194809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:19.195579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:19.195629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:19.195718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:19.195747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:19.195774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:19.195886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.289183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.289897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.289997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:19.290579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard: 72057594046678944 2026-01-07T17:43:19.892520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:43:19.892616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:43:19.892725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:19.899743Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:821:2789], attempt# 0 2026-01-07T17:43:19.912656Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:821:2789], sender# [1:820:2788] 2026-01-07T17:43:19.914086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:19.914152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:43:19.914371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:19.914405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:43:19.914791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.914847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:43:19.915568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:19.915651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:19.915679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:19.915720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:19.915745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:19.915804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT 
/metadata.json HTTP/1.1 HEADERS: Host: localhost:7183 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 150295A1-1C4A-4AEF-AAA1-99A3950FCD1C amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-07T17:43:19.917380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:43:19.917461Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:7183 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2C1155DC-56EC-4BA7-81DA-58A85EE85884 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-07T17:43:19.920511Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-07T17:43:19.920570Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:820:2788] 2026-01-07T17:43:19.920650Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:821:2789], sender# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:7183 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E5DB78F6-3074-4C7E-BC61-B84FC143315A amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2026-01-07T17:43:19.922883Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2026-01-07T17:43:19.922937Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:821:2789], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:19.923059Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:19.929702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:19.929786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:19.929997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:19.930138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:19.930224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:19.930290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.930356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:19.930404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:19.930600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:19.933013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.933450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:19.933511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:19.933650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:19.933710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.933753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:19.933788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.933829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:43:19.933947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:43:19.934014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:19.934057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:19.934099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:19.934238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:19.936603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:19.936684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:805:2774] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2026-01-07T17:42:43.498904Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676528270481171:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:43.500249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:43.853174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:43.892524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:43.892637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:43.917044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:44.044453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.161484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:44.406768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:44.406795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:44.406809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:44.406881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:44.529202Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:45.001379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:45.133569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:45.458906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:49.291532Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676555583880908:2095];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:49.292139Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:49.312679Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:49.313258Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.032260s 2026-01-07T17:42:49.383046Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:49.515221Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:49.586743Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:49.586825Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:49.611883Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:49.618092Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:49.689107Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008393s 2026-01-07T17:42:49.713682Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008192s 2026-01-07T17:42:49.797615Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:49.797635Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:49.797641Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:49.797725Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:50.130236Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:50.279665Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:50.300232Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:50.506129Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:50.867418Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:50.964599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } 
records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2026-01-07T17:42:51.074800Z :INFO: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] Starting read session 2026-01-07T17:42:51.074880Z :DEBUG: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] Starting session to cluster null (localhost:10009) 2026-01-07T17:42:51.080222Z :DEBUG: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:42:51.080266Z :DEBUG: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:42:51.080300Z :DEBUG: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] [null] Reconnecting session to cluster null in 0.000000s 2026-01-07T17:42:51.097304Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2026-01-07T17:42:51.097325Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 1 2026-01-07T17:42:51.101059Z :DEBUG: [/Root/] [/Root/] [76b8bacb-4ae8d480-2651b9da-4410c772] [null] Successfully connected. Initializing session 2026-01-07T17:42:51.114800Z node 4 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-01-07T17:42:51.115726Z node 4 :PQ_READ_PROXY INFO: read_session_actor.cpp:941: session cookie 1 consumer user1 session user1_4_1_15638257118119756141_v1 read init: from# ipv6:[::1]:45006, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-01-07T17:42:51.116015Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer user1 session user1_4_1_15638257118119756141_v1 auth for : user1 20 ... {0, 0} (0-0) 2026-01-07T17:43:00.108899Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 1} (1-1) 2026-01-07T17:43:00.108956Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-07T17:43:00.215795Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:00.216685Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2026-01-07T17:43:00.216715Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2026-01-07T17:43:00.216806Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2026-01-07T17:43:00.216867Z :DEBUG: [/Root/] Decompression task done. 
Partition/PartitionSessionId: 1 (0-1) 2026-01-07T17:43:00.216878Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-07T17:43:00.216948Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2026-01-07T17:43:00.216995Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2026-01-07T17:43:00.217034Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T17:43:00.217085Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-07T17:43:00.217129Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-07T17:43:00.217343Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2026-01-07T17:43:00.217392Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2026-01-07T17:43:00.218580Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2026-01-07T17:43:00.218632Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2026-01-07T17:43:00.218669Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2026-01-07T17:43:00.219319Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2026-01-07T17:43:00.220744Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2026-01-07T17:43:00.223567Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2026-01-07T17:43:00.224576Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2026-01-07T17:43:00.224653Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 4, size 3145731 bytes 2026-01-07T17:43:00.225497Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2026-01-07T17:43:00.226518Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2026-01-07T17:43:00.226587Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 1, size 1048576 bytes 2026-01-07T17:43:00.226732Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2026-01-07T17:43:00.226844Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2026-01-07T17:43:00.226937Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2026-01-07T17:43:00.228083Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2026-01-07T17:43:00.230235Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2026-01-07T17:43:00.233779Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2026-01-07T17:43:00.234994Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2026-01-07T17:43:00.235053Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] The application data is transferred to the client. Number of messages 4, size 4194304 bytes 2026-01-07T17:43:00.239000Z :INFO: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] Closing read session. 
Close timeout: 0.000000s 2026-01-07T17:43:00.239098Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:0:5:1:0 null:stream_TestPutRecordsCornerCases:1:4:8:0 null:stream_TestPutRecordsCornerCases:2:3:0:0 null:stream_TestPutRecordsCornerCases:3:2:3:0 null:stream_TestPutRecordsCornerCases:4:1:1:0 2026-01-07T17:43:00.239151Z :INFO: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] Counters: { Errors: 0 CurrentSessionLifetimeMs: 165 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:43:00.239267Z :NOTICE: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:43:00.239324Z :DEBUG: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] [null] Abort session to cluster 2026-01-07T17:43:00.240042Z :NOTICE: [/Root/] [/Root/] [25b48aa7-55164694-8f558eb5-c74dd541] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:43:00.241514Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer user1 session user1_7_1_106182512929426675_v1 grpc read failed 2026-01-07T17:43:00.241589Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer user1 session user1_7_1_106182512929426675_v1 grpc closed 2026-01-07T17:43:00.241675Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer user1 session user1_7_1_106182512929426675_v1 is DEAD 2026-01-07T17:43:01.734860Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592676607199328269:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:01.734923Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:01.749787Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:01.846572Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:01.871581Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:01.871663Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:01.888496Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:01.910846Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:01.976911Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:01.976961Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:01.976969Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:01.977091Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:02.232716Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:02.355978Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:02.601700Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:43:02.740175Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:03.000040Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101)
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2026-01-07T17:43:03.126941Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3349, MsgBus: 18484 2026-01-07T17:43:05.664541Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676625057635473:2115];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:05.665092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:05.857804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:05.865482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:05.865607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:43:05.922486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:05.942826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:05.943743Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676625057635396:2081] 1767807785658727 != 1767807785658730 2026-01-07T17:43:05.985155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:05.985185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:05.985199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:05.985305Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:06.040161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:06.356170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:06.415740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.547841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.670748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:06.677349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:06.739162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.252106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676637942539158:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.252196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.252486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676637942539168:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.252535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.505053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.529472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.558349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.586496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.613468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.644185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.676116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.718588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:08.779911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676637942540043:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.779972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.779981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676637942540048:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.780142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676637942540050:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.780193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:08.783401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:08.792230Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676637942540051:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:43:08.909932Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676637942540103:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:10.083438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:10.110981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:10.137269Z node ... Disconnected -> Connecting 2026-01-07T17:43:12.931908Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:12.938606Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676656379591655:2081] 1767807792855141 != 1767807792855144 2026-01-07T17:43:12.941558Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:12.942456Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:12.996574Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:12.996593Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:12.996599Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:12.996657Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:13.148178Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:13.275328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:13.284191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.324197Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.455715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.501053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:13.859946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:15.353601Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676669264495405:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.353676Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.353889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676669264495414:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.353928Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.404195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.427923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.452807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.476583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.503810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.531080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.559620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.600733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:15.673118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676669264496281:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.673191Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676669264496286:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.673194Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.673371Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592676669264496289:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.673409Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:15.675697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:15.682708Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592676669264496288:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:43:15.769754Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592676669264496341:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:16.911535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:16.938947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:16.969565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:17.856149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592676656379591683:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:17.856214Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:33.963170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:34.078484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:34.087584Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:34.088080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:34.088235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.520823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.651954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.652124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.706922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.855222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.550750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.551799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.551854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.551887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.552349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:35.629307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:36.248240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:39.802713Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:39.811717Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:39.815949Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:39.896798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:39.901121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.959340Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:39.969682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.408942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.409111Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.410723Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.418153Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.418718Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.419630Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.420226Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.420427Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.420763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.421090Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.421266Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.437817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.813048Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.858901Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:40.859025Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:40.919349Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:40.920885Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:40.921131Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:40.921267Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:40.921328Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:40.921387Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:40.921444Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:40.921509Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:40.922363Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:40.924449Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1909:2454] 2026-01-07T17:42:40.927441Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:40.932343Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Describe result: PathErrorUnknown 2026-01-07T17:42:40.932414Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Creating table 2026-01-07T17:42:40.932521Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:40.945851Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.945979Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2592], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.984281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2190:2634] 2026-01-07T17:42:40.985128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2190:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:41.001564Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2212:2641], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:41.031392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:41.079088Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:41.079298Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:41.098425Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:41.554089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:41.567809Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:42.014206Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:42.180809Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:42.180916Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2116:2580] Owner: [2:2115:2579]. Column diff is empty, finishing 2026-01-07T17:42:42.941587Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... atabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:07.916304Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:3985:3529], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:43:07.919508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:07.971161Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:3968:3524], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:43:08.172468Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4054:3571], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:43:08.196975Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4053:3570] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:08.391344Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4075:3584]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:08.391568Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:43:08.391627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4077:3586] 2026-01-07T17:43:08.391691Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4077:3586] 2026-01-07T17:43:08.391937Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4078:3587] 2026-01-07T17:43:08.392018Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4078:3587], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:43:08.392069Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:43:08.392148Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4077:3586], server id = [2:4078:3587], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:08.392198Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:43:08.392255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4075:3584], StatRequests.size() = 1 2026-01-07T17:43:08.392312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:43:13.629785Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:3940:3509], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:13.630026Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:3940:3509], Start read next stream part 2026-01-07T17:43:13.630358Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33150, txId: 281474976720660] shutting down 2026-01-07T17:43:13.630568Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:43:13.632601Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:3940:3509], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:13.632664Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:3940:3509], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzcyYWQ3ZjktNjMxMDVkNTctNTA1ZmU5ZTYtYzFiZGIwNGY=, TxId: 2026-01-07T17:43:13.633448Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:43:13.633524Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], Start read next stream part 2026-01-07T17:43:13.738550Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4181:3668]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:13.738839Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:13.738891Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4181:3668], StatRequests.size() = 1 2026-01-07T17:43:17.230212Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:17.230306Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], Start read next stream part 2026-01-07T17:43:17.230414Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33170, txId: 281474976720663] shutting down 2026-01-07T17:43:17.230666Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2026-01-07T17:43:17.231415Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4251:3720], ActorId: [2:4252:3721], Starting query actor #1 [2:4253:3722] 2026-01-07T17:43:17.231472Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4252:3721], ActorId: [2:4253:3722], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:17.231619Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:17.231652Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3930:2464], ActorId: [2:4157:3648], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTUyZWE5ZTMtZGYzMzA4MjctOGNmMjJlNTgtY2MyYTZmMDQ=, TxId: 2026-01-07T17:43:17.234016Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4252:3721], ActorId: [2:4253:3722], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODdhNDA2YWItOGE5ZWI0MWYtZjg2NmYxMTQtOGRiNGYwYjk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:17.279066Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4265:3733]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:17.279291Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:17.279323Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4265:3733], StatRequests.size() = 1 2026-01-07T17:43:17.392881Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4252:3721], ActorId: [2:4253:3722], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODdhNDA2YWItOGE5ZWI0MWYtZjg2NmYxMTQtOGRiNGYwYjk=, TxId: 2026-01-07T17:43:17.392991Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4252:3721], ActorId: [2:4253:3722], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODdhNDA2YWItOGE5ZWI0MWYtZjg2NmYxMTQtOGRiNGYwYjk=, TxId: 2026-01-07T17:43:17.393207Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4251:3720], ActorId: [2:4252:3721], Got response [2:4253:3722] SUCCESS 2026-01-07T17:43:17.393462Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:17.406828Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:17.406906Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3050:3427] 2026-01-07T17:43:17.407413Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4288:3748]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:43:17.407711Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:43:17.407756Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:17.407916Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:43:17.407956Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:17.408005Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:17.408223Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:43:17.412136Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:43:17.413222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-07T17:43:17.413308Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 >> ColumnShardTiers::TTLUsage >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> TConsoleTests::TestMergeConfig [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |89.8%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/perf/unittest |89.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |89.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:33.912873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:34.017107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:34.024761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:34.025160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:34.025323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.402071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:34.494510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:34.494664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:34.531450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:34.634866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.404050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.405004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.405046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.405084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.405562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:35.473588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:36.115985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:39.605637Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:39.614958Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:39.619523Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:39.689886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:39.690037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:39.736706Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:39.740449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.068675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.068822Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.070404Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.071242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.071771Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.072628Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.077569Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.077867Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.078158Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.078542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.078764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.097914Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.529153Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.617471Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:40.617600Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:40.665228Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:40.668356Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:40.668643Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:40.668712Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:40.668794Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:40.668859Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:40.668948Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:40.669021Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:40.670099Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:40.688281Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.688410Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.729122Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:40.729975Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:40.808690Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:40.810820Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:40.829472Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:40.829547Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:40.829641Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:40.846482Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:40.852304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:40.860674Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:40.860820Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:40.879622Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:41.163197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:41.180031Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:41.734917Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:41.897069Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:41.897175Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:42.747580Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... T17:43:22.260245Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-07T17:43:22.260285Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767807802131859 2026-01-07T17:43:22.260328Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-07T17:43:22.260367Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-07T17:43:22.260403Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-07T17:43:22.260497Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-07T17:43:22.260560Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:43:22.260667Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:43:22.260737Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:43:22.260809Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:43:22.260880Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:43:22.261070Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:22.262455Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:43:22.262616Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5596:4796] Owner: [2:5595:4795]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:43:22.262678Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5596:4796] Owner: [2:5595:4795]. 
Column diff is empty, finishing 2026-01-07T17:43:22.263435Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:43:22.263511Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:43:22.264395Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:43:22.264486Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:43:22.266907Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:43:22.295496Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5605:4803] 2026-01-07T17:43:22.295687Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5561:4775], server id = [2:5605:4803], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:22.295902Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5605:4803], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:43:22.296128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5606:4804] 2026-01-07T17:43:22.296199Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5606:4804], schemeshard id = 72075186224037897 2026-01-07T17:43:22.357342Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:43:22.357539Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-07T17:43:22.358539Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5611:4809], server id = [2:5615:4813], tablet id = 72075186224037899, status = OK 2026-01-07T17:43:22.358659Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5611:4809], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:22.359077Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5612:4810], server id = [2:5616:4814], tablet id = 72075186224037900, status = OK 2026-01-07T17:43:22.359145Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5612:4810], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:22.359360Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5613:4811], server id = [2:5617:4815], tablet id = 72075186224037901, status = OK 2026-01-07T17:43:22.359417Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5613:4811], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:22.361564Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5614:4812], server id = [2:5618:4816], tablet id = 72075186224037902, status = OK 2026-01-07T17:43:22.361632Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5614:4812], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:43:22.362402Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-01-07T17:43:22.363280Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5611:4809], server id = [2:5615:4813], tablet id = 72075186224037899 2026-01-07T17:43:22.363329Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:22.363706Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:43:22.364109Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:43:22.364445Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5612:4810], server id = [2:5616:4814], tablet id = 72075186224037900 2026-01-07T17:43:22.364479Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:22.364705Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5613:4811], server id = [2:5617:4815], tablet id = 72075186224037901 2026-01-07T17:43:22.364729Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:22.364807Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:43:22.364852Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:43:22.365078Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:43:22.365242Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:43:22.365516Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5623:4821], ActorId: [2:5624:4822], Starting query actor #1 [2:5625:4823] 2026-01-07T17:43:22.365580Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5624:4822], ActorId: [2:5625:4823], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:22.365846Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5614:4812], server id = [2:5618:4816], tablet id = 72075186224037902 2026-01-07T17:43:22.365879Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:43:22.369662Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5624:4822], ActorId: [2:5625:4823], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjA1MDkzNDEtODI1OGZkMDQtY2JlNjFjYzMtM2EwMTE4ODU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:22.492541Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5634:4832]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:22.492982Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:22.493050Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5634:4832], StatRequests.size() = 1 2026-01-07T17:43:22.613778Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5624:4822], ActorId: [2:5625:4823], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjA1MDkzNDEtODI1OGZkMDQtY2JlNjFjYzMtM2EwMTE4ODU=, TxId: 2026-01-07T17:43:22.613860Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5624:4822], ActorId: [2:5625:4823], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA1MDkzNDEtODI1OGZkMDQtY2JlNjFjYzMtM2EwMTE4ODU=, TxId: 2026-01-07T17:43:22.614193Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5623:4821], ActorId: [2:5624:4822], Got response [2:5625:4823] SUCCESS 2026-01-07T17:43:22.614520Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:22.629737Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:22.629820Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:43:22.708100Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5653:4840]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:22.708491Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:22.708546Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:22.708798Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:22.708856Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:22.708904Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:22.712731Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> TConsoleTests::TestCreateSubSubDomain |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 |89.8%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2026-01-07T17:42:44.677452Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676535418714916:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:44.677514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:44.780901Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.011816s 2026-01-07T17:42:44.765163Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.027966s 2026-01-07T17:42:44.791720Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006203s 2026-01-07T17:42:45.489503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:45.665463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:45.665605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:45.773167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:45.950607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:45.953437Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:45.953474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:46.058715Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.021447s 2026-01-07T17:42:46.109236Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009026s 2026-01-07T17:42:46.398201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:46.398232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:46.398241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:46.398356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:47.084082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:47.476422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:47.894086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:48.346020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { 
sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-01-07T17:42:48.469433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:48.647229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:48.728752Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:42:48.728784Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:42:48.728798Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:42:48.743595Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-07T17:42:48.743658Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-07T17:42:48.743697Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-07T17:42:49.617250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676535418714916:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:49.617304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767807768149-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807768}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1767807768565-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1767807768}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767807768556-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807768}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767807768149-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807768}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1767807768565-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1767807768}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767807768556-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767807768,"finish":1767807768},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root" ... 
ARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:58.321139Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:58.453851Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1767807778.711422 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.711602 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.723219 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.723385 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.734138 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.734262 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.741368 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.741520 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:42:58.768823Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:58.814227Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767807778.874125 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 
E0000 00:00:1767807778.874248 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:42:58.886667Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767807778.945091 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807778.945232 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:42:58.957289Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767807779.031268 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807779.031399 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807779.049650 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807779.049810 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:42:59.122703Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:42:59.175350Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-07T17:42:59.175384Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-07T17:42:59.175397Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2026-01-07T17:42:59.175410Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-07T17:42:59.175426Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-07T17:42:59.176045Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus 
from node 7, TabletId: 72075186224037893 not found E0000 00:00:1767807779.218568 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807779.218697 273954 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:43:02.075030Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592676610873640886:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:02.075671Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:02.103854Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:02.212594Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:02.231094Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:02.231191Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:02.270458Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:02.319791Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:02.319819Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:02.319829Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:02.319956Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:02.362224Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:02.546282Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:02.606402Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:02.868856Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) E0000 00:00:1767807783.035124 275270 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807783.049714 275270 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807783.059001 275270 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807783.070274 275270 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767807783.078739 275270 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-07T17:43:03.081595Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::UnsupportedCommandError [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:29:2057] recipient: [1:23:2068] 2026-01-07T17:43:09.490397Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:09.491149Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:09.491402Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:43:09.491858Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:09.493784Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:43:09.493962Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:09.493978Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:09.494105Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:43:09.500834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:43:09.500946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:43:09.501057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:43:09.501122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:09.501176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:09.501217Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:49:2057] recipient: [1:11:2058] 2026-01-07T17:43:09.512227Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:43:09.512354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:09.533379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:09.533474Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:09.533539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:09.533585Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:09.533664Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:09.533701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:09.533721Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:09.533776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:09.544312Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:09.544458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:09.555002Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:09.555120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:43:09.556076Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:43:09.556107Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:43:09.556248Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:43:09.556281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:43:09.566840Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: CYAN } } } Status { Success: true } Success: true 2026-01-07T17:43:09.578163Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: YELLOW } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-07T17:43:09.599736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } Status { Success: true BaseConfig { Node { NodeId: 1 PhysicalLocation: "\n\001\000\000\000\024\001\000\000\000\036\001\000\000\000(\001\000\000\000" HostKey { Fqdn: "::1" IcPort: 12001 } Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Settings { DefaultMaxSlots: 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:29:2057] recipient: [2:23:2068] 2026-01-07T17:43:09.747814Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:09.748401Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:09.748556Z node 2 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:43:09.748827Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:09.749655Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:43:09.749784Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:09.749799Z node 2 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:09.749921Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:43:09.756491Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:43:09.756583Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:43:09.756661Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:43:09.756722Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:09.756796Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:09.756843Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:49:2057] recipient: [2:11:2058] 2026-01-07T17:43:09.767940Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:43:09.768163Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:09.789077Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:09.789188Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:09.789240Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:09.789332Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:09.789418Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:09.789456Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:09.789486Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:09.789519Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:09.800012Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:09.800113Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:09.810592Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEna ... 
16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:24:2068] Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:29:2057] recipient: [3:24:2068] 2026-01-07T17:43:10.009580Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:10.010431Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:10.010645Z node 3 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:43:10.011789Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:10.012029Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:43:10.012180Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:10.012200Z node 3 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:10.012331Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:43:10.018537Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:43:10.018611Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:43:10.018681Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:43:10.018746Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:10.018800Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:10.018836Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:49:2057] recipient: [3:11:2058] 2026-01-07T17:43:10.029894Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:43:10.030044Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:10.051146Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:10.051261Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:10.051340Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:10.051411Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:10.051506Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:10.051551Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:10.051589Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:10.051659Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:10.062192Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:10.062270Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:10.072754Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:10.072921Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:43:10.074221Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:43:10.074272Z node 3 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:43:10.074439Z node 3 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:43:10.074498Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:43:10.075046Z node 3 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:29:2057] recipient: [4:24:2068] 2026-01-07T17:43:10.290078Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:10.291099Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:10.291356Z node 4 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:43:10.292614Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2026-01-07T17:43:10.293131Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:43:10.293291Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:10.293319Z node 4 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:10.293511Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:43:10.303446Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:43:10.303598Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:43:10.303731Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:43:10.303869Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:10.303981Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:10.304096Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:49:2057] recipient: [4:11:2058] 2026-01-07T17:43:10.315580Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:43:10.315746Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:10.337232Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:10.337394Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:10.337481Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:10.337572Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:10.337700Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:10.337770Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:10.337811Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:10.337858Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:10.348708Z node 4 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:10.348851Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:10.359694Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:10.359850Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:43:10.361232Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:43:10.361284Z node 4 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:43:10.361499Z node 4 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:43:10.361550Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:43:10.362214Z node 4 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:23.392239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:23.392299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:23.392325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:23.392351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:23.392396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:23.392421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:23.392472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:23.392525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:23.393147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:23.393360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:23.450778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:23.450827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:23.460608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:23.460822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:23.460967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:23.465281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:23.465514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:23.465947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:23.466130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:23.467917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:23.468052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:23.468950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:23.468991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:23.469083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:23.469114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:23.469142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:23.469283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:23.560781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561463Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.561943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.562018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.562100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.562139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.562173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:23.562208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd: 72057594046678944 2026-01-07T17:43:24.239440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:43:24.239572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:43:24.239720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:24.248173Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:818:2786], attempt# 0 2026-01-07T17:43:24.269828Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:818:2786], sender# [1:817:2785] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:43:24.274136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:24464 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F5F56AE7-C92E-45FB-BC1D-B0620D771970 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: 2026-01-07T17:43:24.274215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-07T17:43:24.274569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:24.274617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:43:24.275091Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:818:2786], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2026-01-07T17:43:24.277051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:24.277134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:43:24.277959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:24.278084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:24.278129Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:24.278192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:24.278253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:24.278361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:24464 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 002FFAD3-49A8-4A96-8247-C3CE77B65F59 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-07T17:43:24.279883Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:818:2786], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-07T17:43:24.280047Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:817:2785] 2026-01-07T17:43:24.280191Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:818:2786], sender# [1:817:2785], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-07T17:43:24.282382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:24464 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A5B66BDB-E03D-4C29-BF9A-F83490E3D19B amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2026-01-07T17:43:24.282976Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:818:2786], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-07T17:43:24.283032Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:818:2786], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:24.283180Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:817:2785], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:24.292721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: 
true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:24.292803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:24.292987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:24.293108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 712 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:24.293194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:24.293234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:24.293294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:24.293339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:24.293499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:24.295238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:24.295409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:24.295471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:24.295582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:24.295618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:24.295668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:24.295702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:24.295738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:43:24.295795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:738:2715] message: TxId: 102 2026-01-07T17:43:24.295853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:24.295889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:24.295918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:24.296030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:24.297984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:24.298034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:803:2772] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:45.863350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:46.127296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:46.162200Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:46.162821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:46.162913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:47.094320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:47.094519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:47.300251Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807761813843 != 1767807761813847 2026-01-07T17:42:47.360444Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:47.466629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:47.624167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:48.021997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:48.036917Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:48.155301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:48.198680Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:42:48.199945Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:42:48.200264Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2768] 2026-01-07T17:42:48.200527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:42:48.251842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:891:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:42:48.252325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:879:2760], Recipient [1:895:2770]: 
NKikimr::TEvTablet::TEvBoot 2026-01-07T17:42:48.253581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:42:48.253830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:42:48.255608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:42:48.255687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:42:48.255757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:42:48.256116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:42:48.256231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:879:2760], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:42:48.256513Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:895:2770] 2026-01-07T17:42:48.256728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:42:48.266852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:42:48.266955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:922:2768] in generation 1 2026-01-07T17:42:48.267174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:879:2760], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:42:48.267868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:42:48.267979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:42:48.269623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:42:48.269704Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:42:48.269752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:42:48.270076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:42:48.270191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:42:48.270263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-07T17:42:48.281793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:42:48.318209Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:42:48.318456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:42:48.318579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-07T17:42:48.318619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:42:48.318654Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-07T17:42:48.318691Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:42:48.319025Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2768], Recipient [1:891:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:42:48.319076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:42:48.319184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:42:48.319238Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:42:48.319299Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:42:48.319359Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-07T17:42:48.319384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:42:48.319407Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:42:48.319429Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:42:48.319754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:895:2770], Recipient [1:895:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:42:48.319794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:42:48.319982Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:42:48.320092Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:42:48.320276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:42:48.320351Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:42:48.320416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:42:48.320455Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:42:48.320523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:42:48.320556Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:42:48.320602Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:42:48.320657Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:42:48.320726Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-07T17:42:48.320845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2771], Recipient [1:891:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:42:48.322082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:42:48.322173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:885:2764], serverId# [1:896:2771], sessionId# [0:0:0] 2026-01-07T17:42:48.322245Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:42:48.322280Z node 1 :TX_DAT ... 24037888 is Executed 2026-01-07T17:42:57.590058Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:42:57.590090Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037888 has finished 2026-01-07T17:42:57.600970Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:42:57.601058Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:42:57.601101Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:42:57.601145Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:42:57.601269Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:42:57.601429Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:1152:2950], Recipient [2:894:2770]: {TEvReadSet step# 3500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-07T17:42:57.601478Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:42:57.601520Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-07T17:42:57.771117Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1194:2976], Recipient [2:1152:2950]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:42:57.771322Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:42:57.771416Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-07T17:42:57.771527Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:42:57.771576Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:42:57.771617Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:42:57.771677Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:42:57.771731Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-07T17:42:57.771800Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:42:57.771850Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:42:57.771878Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:42:57.771903Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:42:57.772029Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:42:57.772328Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-07T17:42:57.772412Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1194:2976], 0} after executionsCount# 1 2026-01-07T17:42:57.772475Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1194:2976], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:42:57.772569Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1194:2976], 0} finished in read 2026-01-07T17:42:57.772651Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:42:57.772682Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:42:57.772719Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:42:57.772753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:42:57.772801Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T17:42:57.772848Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:42:57.772876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T17:42:57.772946Z node 2 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:42:57.773071Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:42:57.773606Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1194:2976], Recipient [2:1152:2950]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:42:57.773671Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:42:57.773883Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1194:2976], Recipient [2:894:2770]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-07T17:42:57.774023Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:42:57.774082Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2026-01-07T17:42:57.774140Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-07T17:42:57.774167Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:42:57.774192Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:42:57.774240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:42:57.774285Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2026-01-07T17:42:57.774336Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-07T17:42:57.774405Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:42:57.774431Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:42:57.774489Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:42:57.774592Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-07T17:42:57.774806Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-07T17:42:57.774850Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1194:2976], 1} after executionsCount# 1 2026-01-07T17:42:57.774890Z node 2 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1194:2976], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:42:57.774957Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1194:2976], 1} finished in read 2026-01-07T17:42:57.775001Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-07T17:42:57.775025Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:42:57.775049Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:42:57.775074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:42:57.775115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-07T17:42:57.775223Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:42:57.775255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037889 has finished 2026-01-07T17:42:57.775282Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:42:57.775360Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:42:57.776554Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1194:2976], Recipient [2:894:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:42:57.776631Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2026-01-07T17:42:37.436991Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008155s 2026-01-07T17:42:37.498384Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676502061805586:2160];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:37.498566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:38.021656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:38.051404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:38.051514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:38.115268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-07T17:42:38.276397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:38.299292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:38.545174Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:38.620458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:38.620491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:38.620499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:38.620581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:38.902265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:39.004664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:39.214740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:39.462467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:43.212116Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676528268354902:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:43.216728Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:43.242894Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:43.376656Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.390491Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-07T17:42:43.390567Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:43.411898Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:43.535134Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:43.536639Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:43.536664Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:43.536679Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:43.536758Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:43.814440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:43.979985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:44.226253Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:44.270337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:44.622047Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:42:44.735339Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: 
"6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } 
records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } records { sequence_number: "68" shard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2026-01-07T17:42:48.217056Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676528268354902:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:48.217124Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } 2026-01-07T17:42:52.961131Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676567333214776:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:52.965040Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:52.977406Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.022004s 2026-01-07T17:42:52.997353Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.019844s 2026-01-07T17:42:53.200557Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:53.460454Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:53.511998Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:53.523707Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:53.523811Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-07T17:42:53.542020Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:53.692406Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:53.692438Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:53.692444Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:53.692522Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:54.013034Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:54.014117Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:54.078119Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:54.101862Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:54.410338Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: 2026-01-07T17:38:42.837617Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:38:42.998648Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:38:42.998757Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:38:42.998821Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:38:42.998885Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 
72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:38:43.024672Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:38:43.050895Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:38:43.052179Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:38:43.054140Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:38:43.056170Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:38:43.058013Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:38:43.071673Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.072204Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|50de65e-c3074c86-a4747e6d-53ee4be7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 100 2026-01-07T17:38:43.087272Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.087744Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fae2af0-9abe6de8-cf4426d1-8417d01_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:38:43.113713Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.114204Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8fbe46b1-56162bc7-1cc8af6f-31811422_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-07T17:38:43.136291Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.136696Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8926ea63-5bc0e216-98013dd3-b8dd4aba_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset 
= 2000 2026-01-07T17:38:43.138417Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.141045Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d4db9ae6-5b88a5fb-aa416788-4144228a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:38:43.181782Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:38:43.182289Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fa97bc78-f8984f7f-9e821407-9124defd_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-07T17:38:43.717988Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-07T17:38:43.778155Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:38:43.778230Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:38:43.778285Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:38:43.778350Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:187:2057] recipient: [2:186:2195] Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:189:2057] recipient: [2:186:2195] 2026-01-07T17:38:43.825931Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:38:43.826020Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:38:43.826072Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:38:43.826130Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 2026-01-07T17:38:43.836736Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! 
new actor is[2:188:2196] 2026-01-07T17:38:43.905216Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:43.941237Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:43.951881Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:43.983130Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.041359Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.075786Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.223008Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.233775Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.545306Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:44.633614Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:45.027412Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:45.194242Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:38:45.392052Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:268:2057] recipient: [2:14:2061] 2026-01-07T17:38:45.527599Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:38:45.528659Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-07T17:38:45.529861Z node 2 :PERSQUEUE INFO: partition_init ... 
2026-01-07T17:42:55.458801Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:589:2576] connected; active server actors: 1 2026-01-07T17:42:55.462167Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:594:2581] connected; active server actors: 1 2026-01-07T17:42:55.465402Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:599:2586] connected; active server actors: 1 2026-01-07T17:42:55.468677Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:604:2591] connected; active server actors: 1 2026-01-07T17:42:55.471682Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:609:2596] connected; active server actors: 1 2026-01-07T17:42:55.475211Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:614:2601] connected; active server actors: 1 2026-01-07T17:42:55.478421Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:619:2606] connected; active server actors: 1 2026-01-07T17:42:55.481705Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:624:2611] connected; active server actors: 1 2026-01-07T17:42:55.484723Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:629:2616] connected; active server actors: 1 2026-01-07T17:42:55.487726Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:634:2621] connected; active server actors: 1 2026-01-07T17:42:55.490861Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:639:2626] connected; active server actors: 1 2026-01-07T17:42:55.494082Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:644:2631] connected; active server actors: 1 2026-01-07T17:42:55.497619Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:649:2636] connected; active server actors: 1 2026-01-07T17:42:55.500876Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:654:2641] connected; active server actors: 1 2026-01-07T17:42:55.504003Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:659:2646] connected; active server actors: 1 2026-01-07T17:42:55.509134Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:664:2651] connected; active server actors: 1 2026-01-07T17:42:55.512837Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:669:2656] connected; active server actors: 1 2026-01-07T17:42:55.518082Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:674:2661] connected; active server actors: 1 2026-01-07T17:42:55.522205Z node 156 :PERSQUEUE_READ_BALANCER 
INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:679:2666] connected; active server actors: 1 2026-01-07T17:42:55.526154Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:684:2671] connected; active server actors: 1 2026-01-07T17:42:55.529511Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:689:2676] connected; active server actors: 1 2026-01-07T17:42:55.532698Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:694:2681] connected; active server actors: 1 2026-01-07T17:42:55.536115Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:699:2686] connected; active server actors: 1 2026-01-07T17:42:55.539274Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:704:2691] connected; active server actors: 1 2026-01-07T17:42:55.542729Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:709:2696] connected; active server actors: 1 2026-01-07T17:42:55.546016Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:714:2701] connected; active server actors: 1 2026-01-07T17:42:55.549207Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:719:2706] connected; active server actors: 1 2026-01-07T17:42:55.552171Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:724:2711] connected; active server actors: 1 2026-01-07T17:42:55.558659Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:729:2716] connected; active server actors: 1 2026-01-07T17:42:55.562484Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:734:2721] connected; active server actors: 1 2026-01-07T17:42:55.566299Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:739:2726] connected; active server actors: 1 2026-01-07T17:42:55.569787Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:744:2731] connected; active server actors: 1 2026-01-07T17:42:55.576081Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:749:2736] connected; active server actors: 1 2026-01-07T17:42:55.590115Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:754:2741], now have 1 active actors on pipe 2026-01-07T17:42:55.602626Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:757:2744], now have 1 active actors on pipe 2026-01-07T17:42:55.604280Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:760:2747], now have 1 active actors on pipe 2026-01-07T17:42:55.610075Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:763:2750] connected; active server actors: 1 
2026-01-07T17:42:56.547187Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:42:56.615084Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:42:56.615184Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:42:56.615275Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:42:56.615375Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:42:56.695703Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:42:56.695819Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:42:56.695906Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:42:56.696029Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:42:56.696540Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:42:56.701392Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:42:56.703202Z node 157 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 163 actor [157:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 163 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 163 } Consumers { Name: "aaa" Generation: 163 Important: true } 2026-01-07T17:42:56.704590Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:248:2196] 2026-01-07T17:42:56.706241Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 3 [157:248:2196] 2026-01-07T17:42:56.708018Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:250:2196] 2026-01-07T17:42:56.709053Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 3 [157:250:2196] 2026-01-07T17:42:56.750682Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:42:56.750796Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:42:56.752415Z node 157 :PERSQUEUE NOTICE: 
pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:42:56.752494Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:42:56.753594Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:326:2254] 2026-01-07T17:42:56.755196Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:328:2254] 2026-01-07T17:42:56.761501Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:42:56.762030Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:42:56.762183Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 4 [157:326:2254] 2026-01-07T17:42:56.762942Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 4 [157:328:2254] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2026-01-07T17:42:45.236945Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676540411345269:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:45.236998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:46.253168Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:46.372812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:46.372962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:46.455997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:46.456112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:46.500360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:46.692717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:46.774866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:46.828534Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007068s 2026-01-07T17:42:46.853010Z node 2 :BS_NODE WARN: 
{NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.029346s 2026-01-07T17:42:46.865002Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010486s 2026-01-07T17:42:47.042556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:47.042592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:47.042599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:47.042697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:47.396477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:47.569999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:47.838871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:48.230229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } 
records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } 2026-01-07T17:42:50.238572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676540411345269:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:50.238633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } 
records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_n ... 
":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787017-105","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787049-106","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787049-107","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787074-108","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787074-109","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787100-110","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787100-111","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767807787,"finish":1767807787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767807787130-112","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767807787,"finish":1767807788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807788}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787130-113","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767807787,"finish":1767807788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807788}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807787130-114","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767807787,"finish":1767807788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807788}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767807788168-115","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767807788,"finish":1767807789},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807789}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807788168-116","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767807788,"finish":1767807789},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807789}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807788168-117","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767807788,"finish":1767807789},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807789}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767807789182-118","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767807789,"finish":1767807790},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807790}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807789182-119","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767807789,"finish":1767807790},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807790}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807789182-120","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767807789,"finish":1767807790},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807790}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767807790197-121","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767807790,"finish":1767807791},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807791}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807790197-122","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767807790,"finish":1767807791},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807791}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807790197-123","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767807790,"finish":1767807791},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807791}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767807791214-124","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767807791,"finish":1767807792},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807792}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807791214-125","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767807791,"finish":1767807792},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807792}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767807791214-126","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767807791,"finish":1767807792},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767807792}' |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:12.198892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:12.310210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:12.317959Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:12.318321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:12.318483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:12.705724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:12.799858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:12.800024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:12.835039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:12.924351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:13.645725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:13.646537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:13.646591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:13.646621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:13.647051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:13.713360Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:14.252393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:17.042537Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:17.048508Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:17.051904Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:17.077007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:17.077116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:17.104558Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:17.106671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:17.278463Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:17.278562Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:17.279785Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.280262Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.280841Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.281453Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.281869Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.282008Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.282124Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.282306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.282381Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:17.297100Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:17.458495Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:17.526796Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:17.526910Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:17.551341Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:17.552610Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:17.552773Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:17.552832Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:17.552874Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:17.552912Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:17.552982Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:17.553037Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:17.554304Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:17.572382Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:17.572473Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2122:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:17.593033Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2170:2626] 2026-01-07T17:42:17.593945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2170:2626], schemeshard id = 72075186224037897 2026-01-07T17:42:17.648947Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2654] 2026-01-07T17:42:17.652240Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:17.669397Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Describe result: PathErrorUnknown 2026-01-07T17:42:17.669473Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Creating table 2026-01-07T17:42:17.669569Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:17.687180Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2319:2699], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:17.696808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:17.721671Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:17.721813Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:17.740697Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:17.908306Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:17.921380Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:18.188996Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:18.309721Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:18.309827Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2235:2659] Owner: [2:2234:2658]. Column diff is empty, finishing 2026-01-07T17:42:18.979726Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:42:53.568274Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5438:4704], schemeshard count = 1 2026-01-07T17:42:54.789877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:42:54.789952Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:42:54.789991Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:42:54.800217Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:42:54.834908Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:42:54.835525Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:42:54.835623Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:42:54.836662Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-07T17:42:54.852358Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:42:54.852681Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:42:54.854327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5518:4747], server id = [2:5522:4751], tablet id = 72075186224037899, status = OK 2026-01-07T17:42:54.854724Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5518:4747], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:42:54.856301Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5519:4748], server id = [2:5523:4752], tablet id = 72075186224037900, status = OK 2026-01-07T17:42:54.856403Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5519:4748], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:42:54.856692Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5520:4749], server id = [2:5524:4753], tablet id = 72075186224037901, status = OK 2026-01-07T17:42:54.856759Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5520:4749], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:42:54.858177Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5521:4750], server id = [2:5525:4754], tablet id = 72075186224037902, status = OK 2026-01-07T17:42:54.858250Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5521:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:42:54.866955Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:42:54.867499Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5518:4747], server id = [2:5522:4751], tablet id = 72075186224037899 2026-01-07T17:42:54.867557Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:42:54.869514Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:42:54.870695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5519:4748], server id = [2:5523:4752], tablet id = 72075186224037900 2026-01-07T17:42:54.870725Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:42:54.870872Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:42:54.871127Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5520:4749], server id = [2:5524:4753], tablet id = 72075186224037901 2026-01-07T17:42:54.871149Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:42:54.871320Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:42:54.871380Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:42:54.871549Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-07T17:42:54.871715Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:42:54.872198Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5538:4763], ActorId: [2:5539:4764], Starting query actor #1 [2:5540:4765] 2026-01-07T17:42:54.872269Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5539:4764], ActorId: [2:5540:4765], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:42:54.875001Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5521:4750], server id = [2:5525:4754], tablet id = 72075186224037902 2026-01-07T17:42:54.875042Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:42:54.875787Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5539:4764], ActorId: [2:5540:4765], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWQ1ODg0MDctYWM4ZDgzNWYtZWQ5YjI2MWUtNjZmMTBiYjE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:42:54.991100Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5549:4774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:54.991384Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:54.991431Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5549:4774], StatRequests.size() = 1 2026-01-07T17:42:55.141612Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5539:4764], ActorId: [2:5540:4765], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWQ1ODg0MDctYWM4ZDgzNWYtZWQ5YjI2MWUtNjZmMTBiYjE=, TxId: 2026-01-07T17:42:55.141699Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5539:4764], ActorId: [2:5540:4765], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWQ1ODg0MDctYWM4ZDgzNWYtZWQ5YjI2MWUtNjZmMTBiYjE=, TxId: 2026-01-07T17:42:55.142099Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5538:4763], ActorId: [2:5539:4764], Got response [2:5540:4765] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:42:55.142656Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:42:55.144427Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2835:3036];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-07T17:42:55.179118Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:42:55.179200Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-01-07T17:42:55.324425Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5569:4784];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2026-01-07T17:42:55.336939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2026-01-07T17:42:55.337507Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-07T17:42:55.337886Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-07T17:42:55.645946Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5687:4877]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:55.646297Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:42:55.646346Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:42:55.646565Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:42:55.646602Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:42:55.646657Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:42:55.649715Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> Cache::Test4 [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::CheckId [GOOD] >> Cache::Test5 >> Cache::Test1 [GOOD] >> 
Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> EntityId::Order |89.9%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> ColumnShardTiers::TieringUsage >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> Cache::Test5 [GOOD] >> Cache::Test6 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2026-01-07T17:42:46.991920Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.021863s 2026-01-07T17:42:47.001711Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676548784653512:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:47.001768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:47.832990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:47.884036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:47.884219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:47.920379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:48.110905Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:48.259042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:48.292438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:48.309245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:48.309263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:48.309271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:48.309353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:48.779709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:48.887110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:49.277793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:49.802435Z node 1 :PERSQUEUE ERROR: partition_read.cpp:831: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2026-01-07T17:42:49.802501Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 100000, 0 EndOffset 30 2026-01-07T17:42:54.219535Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676578213604838:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:54.220012Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:54.244703Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:54.473026Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:42:54.526670Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:54.566303Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:54.566387Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:54.623170Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:54.793562Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:54.833927Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:54.833953Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:54.833960Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:54.834046Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:55.122338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:55.198186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:42:55.242770Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:55.622097Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:42:59.218795Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592676578213604838:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:59.218888Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:43:09.463769Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:09.463795Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:21.940479Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592676694487610200:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:21.940517Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:21.954029Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:22.031220Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:22.050523Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:22.050600Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:22.080911Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:22.119032Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:22.119049Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:22.119062Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:22.119125Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:22.235729Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:22.285130Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:22.366253Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:22.557129Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:43:25.573283Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592676711833510973:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:25.573344Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:25.593072Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:25.719931Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:25.735916Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:25.736012Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:25.767049Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:25.821270Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:25.821301Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:25.821310Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:25.821414Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:25.885709Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:26.081731Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:26.141837Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:26.357326Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 
2026-01-07T17:43:26.373658Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 >> TestProgram::Like >> TestProgram::JsonExistsBinary >> TestProgram::NumRowsWithNulls >> TestProgram::NumRowsWithNulls [GOOD] >> TestProgram::Like [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |90.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, 
label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N5(8):{\"a\":true,\"p\":{\"function\":{\"function\":\"NumRows\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"a":true,"p":{"function":{"function":"NumRows","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Filter; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL 
KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:31.513991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:31.621863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:31.630437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:31.630845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:31.631000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:32.018035Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:32.111283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:32.111389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:32.145411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:32.223250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:32.927037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:32.928411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:32.928466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:32.928519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:32.928767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:33.005309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:33.578934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:36.681009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:36.688373Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:36.692331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:36.730767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:36.730893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.772992Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:36.774824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:36.986715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:36.986853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.988466Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.994364Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.995180Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.997531Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.997681Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.998039Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.998153Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.998296Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:36.998520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.015662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:37.259384Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:37.337734Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:37.337830Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:37.381097Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:37.382507Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:37.382809Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:37.382874Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:37.382924Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:37.382984Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:37.383048Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:37.383099Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:37.383784Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:37.386285Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:37.389164Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:37.400690Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:37.400762Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:37.400854Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:37.408475Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:37.408574Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:37.441074Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:37.441567Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:37.445685Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:37.488305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:37.510956Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:37.511103Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:37.527487Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:37.722070Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:37.914012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:38.348301Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:38.474647Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:38.474742Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:39.269038Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... UtZmEwN2QxYzMtZWFhMmI3YjAtNjhlOTU0Mzk=, TxId: 2026-01-07T17:43:19.373820Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4555:3931], ActorId: [2:4558:3933], Got response [2:4561:3935] SUCCESS 2026-01-07T17:43:19.374044Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:19.398431Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:19.398506Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3344:3481] 2026-01-07T17:43:19.451467Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:43:19.451564Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:43:19.494006Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4384:3799], schemeshard count = 1 2026-01-07T17:43:20.326005Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:43:20.328498Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:43:20.330587Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-07T17:43:20.330679Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], Start read next stream part 2026-01-07T17:43:20.358122Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4645:3985]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:20.358312Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:43:20.358351Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4645:3985], StatRequests.size() = 1 2026-01-07T17:43:25.846667Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:25.846811Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], Start read next stream part 2026-01-07T17:43:25.847000Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:43:25.848904Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 281474976720666] shutting down 2026-01-07T17:43:25.849184Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:25.849220Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4628:3975], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTdhZDI4MWYtNGY3ZmYyM2MtNGM1ZjdjNDQtOGM3ZTk1ZDM=, TxId: 2026-01-07T17:43:25.849305Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-07T17:43:25.849372Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], Start read next stream part 2026-01-07T17:43:25.890576Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4739:4058]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:25.890804Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-07T17:43:25.890839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4739:4058], StatRequests.size() = 1 2026-01-07T17:43:29.576827Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4618:2463], ActorId: [2:4721:4043], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:29.576981Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], Start read next stream part 2026-01-07T17:43:29.577498Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33110, txId: 281474976720669] shutting down 2026-01-07T17:43:29.577820Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:29.577872Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4618:2463], ActorId: [2:4721:4043], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGE2OTY0M2MtZjUzODc0MzQtNDBmM2ZkMTMtZDVkYWQ3YjE=, TxId: 2026-01-07T17:43:29.578247Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4803:4106], ActorId: [2:4807:4109], Starting query actor #1 [2:4808:4110] 2026-01-07T17:43:29.578302Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4807:4109], ActorId: [2:4808:4110], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:29.582163Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4807:4109], ActorId: [2:4808:4110], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTM1MTlhNDItODI4ZGFlNTMtZmUzNGQ0MzktZTdlZTE5MTU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:29.632453Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4807:4109], ActorId: [2:4808:4110], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTM1MTlhNDItODI4ZGFlNTMtZmUzNGQ0MzktZTdlZTE5MTU=, TxId: 2026-01-07T17:43:29.632535Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4807:4109], ActorId: [2:4808:4110], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTM1MTlhNDItODI4ZGFlNTMtZmUzNGQ0MzktZTdlZTE5MTU=, TxId: 2026-01-07T17:43:29.632829Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4803:4106], ActorId: [2:4807:4109], Got response [2:4808:4110] SUCCESS 2026-01-07T17:43:29.633177Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:29.647931Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-07T17:43:29.648010Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3344:3481] 2026-01-07T17:43:29.648695Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4835:4128]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:43:29.649052Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-07T17:43:29.649113Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:29.649397Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-07T17:43:29.649450Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 6 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:29.649513Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:29.649778Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:43:29.654817Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T17:43:29.656293Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-07T17:43:29.656386Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-07T17:43:29.657070Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [2:4849:4142]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T17:43:29.657411Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-07T17:43:29.657468Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:43:29.657702Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-07T17:43:29.657753Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 7 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:43:29.657832Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:43:29.658101Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:43:29.660492Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-07T17:43:29.660835Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 2026-01-07T17:43:29.662788Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 >> TestProgram::YqlKernelStartsWith |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging >> TestProgram::YqlKernelStartsWith [GOOD] >> TestScript::StepMerging [GOOD] >> TestProgram::YqlKernelContains |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TestProgram::YqlKernelContains [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> Cache::Test6 [GOOD] >> TestProgram::YqlKernelStartsWithScalar >> TContinuousBackupTests::Basic |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> IncrementalBackup::MultiBackup >> TContinuousBackupTests::TakeIncrementalBackup >> TestProgram::YqlKernelStartsWithScalar [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; >> IncrementalBackup::E2EBackupCollection >> IncrementalBackup::BackupRestore |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> IncrementalBackup::SimpleBackup >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> 
TestProgram::JsonValueBinary |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test6 [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |90.0%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] >> TestProgram::JsonValueBinary [GOOD] >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TestProgram::YqlKernel >> TContinuousBackupTests::Basic [GOOD] >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::JsonExists >> TestProgram::YqlKernelEquals >> TestProgram::YqlKernelEndsWith >> TestProgram::CountUIDByVAT [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TestProgram::YqlKernel [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TestProgram::JsonExists [GOOD] >> TestProgram::YqlKernelEquals [GOOD] >> TestProgram::YqlKernelEndsWith [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; 
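The program_parsed JSON mirrors the same graph in machine-readable form: each "edges" entry lists, under "inputs"/"from", the nodes a given owner_id consumes, so a valid execution order can be recovered with a topological sort. A minimal sketch, assuming the JSON payload has been saved to program_parsed.json (hypothetical file name):

    import json
    from graphlib import TopologicalSorter  # Python 3.9+

    with open("program_parsed.json") as f:
        program = json.load(f)

    # Map each node to the nodes it depends on; predecessors must run first.
    deps = {e["owner_id"]: [i["from"] for i in e["inputs"]] for e in program["edges"]}
    order = list(TopologicalSorter(deps).static_order())
    print(order)  # e.g. [7, 6, 2, 4, 0, 5] for the graph printed above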
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:33.038880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:33.038998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:33.039053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:33.039089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:33.039149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:33.039181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:33.039231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:33.039298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:33.040149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:33.040460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:33.128614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:33.128684Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:33.143697Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:33.144028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:33.144269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:33.151355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:33.151703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:33.152623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:33.152906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:33.156116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:33.156305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:33.157498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:33.157566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:33.157688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:33.157737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:33.157776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:33.157951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:33.302956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.303917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:43:33.304664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.304994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
RD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 964 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-07T17:43:34.418034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:34.418308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 964 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-07T17:43:34.418438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 964 } } CommitVersion { Step: 5000041 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:43:34.419316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:43:34.419365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:34.419534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:43:34.419603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:43:34.419725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-07T17:43:34.419800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:34.419852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 
2026-01-07T17:43:34.419894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:34.419942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-07T17:43:34.422724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:43:34.423011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:43:34.423296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:43:34.423350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:43:34.423474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:43:34.423513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:43:34.423554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-07T17:43:34.423605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:43:34.423646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-07T17:43:34.423706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 104 2026-01-07T17:43:34.423776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-07T17:43:34.423819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:43:34.423857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:43:34.423982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:34.424036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-07T17:43:34.424060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-07T17:43:34.424089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:43:34.424113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-07T17:43:34.424158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-07T17:43:34.424245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:43:34.424637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:43:34.424688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:43:34.424758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:43:34.424800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:43:34.424845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:34.427502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:43:34.427550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1095:3007] 2026-01-07T17:43:34.427873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-07T17:43:34.428465Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:43:34.428734Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 301us result status StatusPathDoesNotExist 2026-01-07T17:43:34.428951Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:43:34.429538Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2026-01-07T17:43:34.429761Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 204us result status StatusPathDoesNotExist 2026-01-07T17:43:34.429929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
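For reference, the boolean result of the "$.key" existence check over the two json_string rows shown above can be re-derived outside the engine. This is plain Python for illustration only, not the Json2.SqlExists kernel the test actually exercises:

    import json

    rows = ['{"key":"value"}', "[]"]  # the json_string inputs shown above

    def key_exists(text, key="key"):
        # True only if the document is an object containing the key.
        doc = json.loads(text)
        return isinstance(doc, dict) and key in doc

    print([key_exists(r) for r in rows])  # expected: [True, False]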
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"2,4\",\"p\":{\"options\":[\"{10001(Count):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[4]},\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
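The aggregation node above ("{10001(Count):[2]}" keyed by column 4) counts uid values per vat group. A plain-Python restatement over hypothetical sample values, for illustration only:

    from collections import Counter

    uid = ["a", "b", "c", None]  # hypothetical sample values, not taken from the test
    vat = [10, 10, 20, 20]

    # Count(uid) per vat key: only non-null uid values contribute to the count.
    counts = Counter(v for u, v in zip(uid, vat) if u is not None)
    print(dict(counts))  # {10: 2, 20: 1}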
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"a":true,"i":"2,4","p":{"options":["{10001(Count):[2]}"],"type":"AGGREGATION","keys":[4]},"o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> 
TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; 
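The EndsWith kernel in this test consumes a string column and a substring column and yields a UInt8 (boolean) column. A plain-Python equivalent over hypothetical sample values, for illustration only:

    strings    = ["columnshard", "ydb core", "unit"]  # hypothetical samples
    substrings = ["shard", "core", "tests"]

    # One UInt8-like result per row: 1 if the string ends with the substring.
    result = [int(s.endswith(sub)) for s, sub in zip(strings, substrings)]
    print(result)  # [1, 1, 0]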
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, 
label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, 
label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\" ... ?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 
7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.1%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAuthorization |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> TestProgram::CountWithNulls |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TestProgram::CountWithNulls [GOOD] |90.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TestProgram::JsonValue [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"a\":true,\"i\":\"2\",\"p\":{\"function\":{\"function\":\"Count\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"a":true,"i":"2","p":{"function":{"function":"Count","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; >> ColumnShardTiers::DSConfigs |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000 ... 
04\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:33.253311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:33.253399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:33.253434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:33.253468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:33.253543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:33.253576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:33.253639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:33.253719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:33.254550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:33.254822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:33.328626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:33.328704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:33.345575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:33.346014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:33.346252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:33.353363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:33.353722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:33.354437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:33.354708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:33.357658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:33.357848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:33.359056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:33.359124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:33.359257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:33.359311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:33.359351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:33.359539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:33.502826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.503803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.503923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.503985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:43:33.504462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:33.504835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard schema changes has been received, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-07T17:43:35.600551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710757:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:35.600584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710757:0 129 -> 240 2026-01-07T17:43:35.602310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-07T17:43:35.603029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-07T17:43:35.603255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-07T17:43:35.603293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-07T17:43:35.603378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-07T17:43:35.603410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:43:35.603440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-07T17:43:35.603466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:43:35.603495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-07T17:43:35.603578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1376:3195] message: TxId: 281474976710757 2026-01-07T17:43:35.603621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:43:35.603664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-07T17:43:35.603691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-07T17:43:35.603780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:43:35.603843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-07T17:43:35.603866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-07T17:43:35.603893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:43:35.603911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: 
Operation and all the parts is done, operation id: 281474976710757:2 2026-01-07T17:43:35.603936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-07T17:43:35.603991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:43:35.604279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:43:35.604326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:43:35.604388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:43:35.604423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:43:35.604446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:35.606539Z node 1 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-07T17:43:35.606673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:43:37.635475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:43:37.635867Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 433us result status StatusPathDoesNotExist 2026-01-07T17:43:37.636095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:43:37.636840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:43:37.640360Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 3.36ms result status StatusPathDoesNotExist 2026-01-07T17:43:37.641142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:43:37.643193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:43:37.644061Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 751us result status StatusSuccess 2026-01-07T17:43:37.644955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> S3SettingsConversion::Port |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> AnalyzeColumnshard::AnalyzeServerless [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2026-01-07T17:40:58.034855Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676077862864181:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:58.035095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:58.088268Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676077704818590:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:40:58.088461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:40:58.278886Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:58.296819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:40:58.320493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:58.320626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:58.323938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:40:58.324014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:40:58.352670Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:40:58.352994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:58.353349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:40:58.431999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:40:58.447774Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:58.501675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:40:58.501699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:40:58.501705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:40:58.501830Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:40:58.574536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:40:58.710959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:40:59.044422Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:40:59.094075Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:00.983922Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:41:00.984451Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:41:00.984476Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:41:00.984796Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7592676086294753376:2295], Start check tables existence, number paths: 2 2026-01-07T17:41:00.986378Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-07T17:41:00.990013Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7592676086294753376:2295], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:41:00.990108Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7592676086294753376:2295], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:41:00.990135Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7592676086294753376:2295], Successfully finished 2026-01-07T17:41:00.990179Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:41:01.079556Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:41:01.079838Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676090747767136:2326], Start check tables existence, number paths: 2 2026-01-07T17:41:01.081105Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:41:01.081134Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:41:01.081359Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676090747767136:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:41:01.081413Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676090747767136:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:41:01.081446Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592676090747767136:2326], Successfully finished 2026-01-07T17:41:01.081597Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:41:01.081741Z node 1 :KQP_WORKLOAD_SERVICE TRACE: 
kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-07T17:41:01.083351Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZDhjZmVkMjYtYmY3ODYyMDItYzdkYWU5ZmQtNmJlNGZhMjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZDhjZmVkMjYtYmY3ODYyMDItYzdkYWU5ZmQtNmJlNGZhMjM= temp_dir_name# 5ac81754-427e-67f8-70ee-b5b65546d849 trace_id# 2026-01-07T17:41:01.083492Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZDhjZmVkMjYtYmY3ODYyMDItYzdkYWU5ZmQtNmJlNGZhMjM=, ActorId: [1:7592676090747767161:2329], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:01.099924Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:41:01.103434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:01.107026Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-07T17:41:01.107205Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:41:01.120824Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:41:01.212637Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:41:01.282654Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676090747767238:2866] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:01.282778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676090747767163:2810], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:41:01.285759Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MTljOWIzMjQtOWIzODY2OGEtOTYyNTVkMGEtYzE3N2Q5NTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MTljOWIzMjQtOWIzODY2OGEtOTYyNTVkMGEtYzE3N2Q5NTc= temp_dir_name# 3b3fa8f2-4f6f-23e9-8369-b1bf278734b7 trace_id# 2026-01-07T17:41:01.286325Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:41:01.286346Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:41:01.286392Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-07T17:41:01.286423Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MTljOWIzMjQtOWIzODY2OGEtOTYyNTVkMGEtYzE3N2Q5NTc=, ActorId: [1:7592676090747767248:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:41:01.286607Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=MTljOWIzMjQtOWIzODY2OGEtOTYyNTVkMGEtYzE3N2Q5NTc=, ActorId: [1: ... 
orkloadService] [TCleanupTablesActor] ActorId: [14:7592676767405479370:2321], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:43:38.289123Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7592676767405479370:2321], Successfully finished 2026-01-07T17:43:38.289193Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:43:38.289826Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg== temp_dir_name# 133a8b6e-48e4-a6f4-3ab7-11a4a36cbc97 trace_id# 2026-01-07T17:43:38.289945Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:43:38.293200Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:43:38.299867Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:38.301663Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-07T17:43:38.301932Z node 14 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-07T17:43:38.314849Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:43:38.411574Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-07T17:43:38.412538Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7592676745930642059:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:38.412644Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:43:38.416107Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7592676767405479451:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:38.416587Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592676767405479400:2502], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-07T17:43:38.417016Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-07T17:43:38.417048Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-07T17:43:38.417133Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592676767405479458:2327], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-07T17:43:38.419184Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592676767405479458:2327], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-07T17:43:38.419281Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-07T17:43:38.419309Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-07T17:43:38.419647Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7592676767405479469:2329], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-07T17:43:38.421025Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7592676767405479469:2329], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-07T17:43:38.433348Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:43:38.433391Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-07T17:43:38.433478Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] 
[TPoolFetcherActor] ActorId: [14:7592676767405479482:2331], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:43:38.433569Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:43:38.433652Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: ReadyState, LegacyTraceId: 01kecrx6p03pxvxjne1bnt1zk3, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-07T17:43:38.435303Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592676767405479482:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:38.435476Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:38.435602Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-07T17:43:38.435670Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592676767405479491:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-07T17:43:38.435965Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592676767405479491:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:38.436049Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:38.481761Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:38.489043Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: ExecuteState, LegacyTraceId: 01kecrx6p03pxvxjne1bnt1zk3, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [14:7592676767405479493:2326] workload_service_cleanup# false trace_id# 2026-01-07T17:43:38.490305Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7592676767405479469:2329], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-07T17:43:38.494634Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: CleanupState, LegacyTraceId: 01kecrx6p03pxvxjne1bnt1zk3, EndCleanup is_final# false trace_id# 2026-01-07T17:43:38.494926Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: CleanupState, LegacyTraceId: 01kecrx6p03pxvxjne1bnt1zk3, Sent query response back to proxy proxy_request_id# 3 proxy_id# [14:7592676745930642292:2265] trace_id# 2026-01-07T17:43:38.510862Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:43:38.510995Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:43:38.511053Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:43:38.511100Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:43:38.511346Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=14&id=NzVmNGFjOGUtMTAzYzk1NWMtMWRjNTAyMi05NWQ2NDgxYg==, ActorId: [14:7592676767405479398:2326], ActorState: unknown state, Session actor destroyed trace_id# |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental >> TCacheTest::RacyRecreateAndSync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:09.660537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:09.766716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:09.773122Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:09.773417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:09.773594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:10.222478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:10.349998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:10.350161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:10.390759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:10.507151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:11.424009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:11.425995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:11.426083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:11.426119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:11.426734Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:11.526250Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:12.239478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:16.623987Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:16.636110Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:16.641200Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:16.700793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:16.717451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:16.763850Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:16.770032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:17.213867Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.214649Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.215542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.216172Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.216621Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.216836Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.217433Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.217652Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.217799Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:17.408014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:41:17.440256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:17.440417Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:17.462937Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:17.926149Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:18.030362Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:18.030482Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:18.071503Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:18.071775Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:18.072048Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:18.072121Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:18.072188Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:18.072251Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:18.072327Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:18.072394Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:18.073153Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:18.134635Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:18.134758Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2188:2604], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:18.247093Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2196:2610] 2026-01-07T17:41:18.315928Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2247:2650] 2026-01-07T17:41:18.316674Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2247:2650], schemeshard id = 72075186224037897 2026-01-07T17:41:18.325720Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:41:18.459132Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Describe result: PathErrorUnknown 2026-01-07T17:41:18.459217Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Creating table 2026-01-07T17:41:18.459295Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:41:18.493760Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2347:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:18.509453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:18.520159Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:18.520347Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:18.565194Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:18.914104Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:19.388119Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:19.465891Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:19.465996Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Column diff is empty, finishing 2026-01-07T17:41:20.205957Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ce traversal operations sent the requests. 2026-01-07T17:41:51.912135Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-07T17:41:51.912220Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.954000s, at schemeshard: 72075186224037899 2026-01-07T17:41:51.912427Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-07T17:41:51.926085Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:41:52.012481Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:41:52.015687Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:41:52.019445Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:41:52.019590Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], Start read next stream part 2026-01-07T17:41:52.230928Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4921:4165]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:52.231208Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:52.231402Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-07T17:41:52.231449Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4924:4168] 2026-01-07T17:41:52.231508Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4924:4168] 2026-01-07T17:41:52.231785Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4925:4169] 2026-01-07T17:41:52.231890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4924:4168], server id = [2:4925:4169], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:52.231952Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4925:4169], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:52.232056Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:41:52.232262Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:41:52.232372Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4921:4165], StatRequests.size() = 1 2026-01-07T17:41:52.232455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:57.198223Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrsyrka8y8nvw6cm8zqcyn", SessionId: ydb://session/3?node_id=2&id=YzQxYTk4ZTAtMTQ0ZDI2NWEtN2UwNzFjZi05ZmQxMTlkNA==, Slow query, duration: 65.172638s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:42:57.200874Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:42:57.201209Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], Start read next stream part 2026-01-07T17:42:57.209932Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-07T17:42:57.210154Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:42:57.214941Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:42:57.215044Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:4894:4151], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjEwNzA0YWQtZmYwZmQ3NjQtY2FmZDlmMjgtZjI2MjA1ZGI=, TxId: 2026-01-07T17:42:57.215984Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:42:57.216131Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], Start read next stream part 2026-01-07T17:42:57.396708Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5054:4285]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:57.397046Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:57.397102Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5054:4285], StatRequests.size() = 1 2026-01-07T17:43:39.394855Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrvye0aexcwhpm7fjenh74", SessionId: ydb://session/3?node_id=2&id=ZGE5NGI2ZWUtYTQ5MmQyMmUtYzg2YjUzZi01Y2NjZWM1OQ==, Slow query, duration: 42.172368s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:43:39.396598Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:39.396710Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], Start read next stream part 2026-01-07T17:43:39.396819Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33590, txId: 18446744073709551615] shutting down 2026-01-07T17:43:39.397364Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:39.397411Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4884:2474], ActorId: [2:5036:4270], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=NDc0MjU5OTktZTFkMzJlNmQtOTVhN2E0M2UtNTNlNzIxNzI=, TxId: 2026-01-07T17:43:39.397746Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5146:4371], ActorId: [2:5150:4374], Starting query actor #1 [2:5151:4375] 2026-01-07T17:43:39.397800Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5150:4374], ActorId: [2:5151:4375], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-07T17:43:39.401124Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5150:4374], ActorId: [2:5151:4375], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NDhlZDc1YmUtYjI5OTQ3MTYtNmI4OWExNTktZTcwYTZkYjg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:39.469838Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5163:4386]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:39.470104Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:39.470354Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-07T17:43:39.470406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:43:39.470527Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:43:39.470584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5163:4386], StatRequests.size() = 1 2026-01-07T17:43:39.470639Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:43:39.641424Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5150:4374], ActorId: [2:5151:4375], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDhlZDc1YmUtYjI5OTQ3MTYtNmI4OWExNTktZTcwYTZkYjg=, TxId: 2026-01-07T17:43:39.641539Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5150:4374], ActorId: [2:5151:4375], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhlZDc1YmUtYjI5OTQ3MTYtNmI4OWExNTktZTcwYTZkYjg=, TxId: 2026-01-07T17:43:39.642049Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5146:4371], ActorId: [2:5150:4374], Got response [2:5151:4375] SUCCESS 2026-01-07T17:43:39.642559Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:39.670118Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-07T17:43:39.670241Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
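For readability, the three statistics-traversal queries that appear single-line in the log entries above are repeated here reflowed; the text is the same YQL as captured (only whitespace differs). HLL is presumably a HyperLogLog sketch, and the 4096/8 arguments are presumably the width and depth of the count-min sketch created by StatisticsInternal::CMSCreate; the element types of the List declarations in the last query were stripped in the captured log and are left as-is rather than reconstructed.

    -- 1. Base column statistics (row count, HLL sketches, min/max), as logged:
    SELECT count(*), HLL(Key), min(Key), max(Key), HLL(Value)
    FROM `/Root/Database/Table`

    -- 2. Count-min sketch aggregation over Value via a UDAF AggregationFactory:
    $f0 = ($p0,$p1) -> { return AggregationFactory(
        "UDAF",
        ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },
        ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },
        StatisticsInternal::CMSMerge,
        StatisticsInternal::CMSFinalize,
        StatisticsInternal::CMSSerialize,
        StatisticsInternal::CMSDeserialize,
    )
    };
    SELECT AGGREGATE_BY(Value, $f0(4096, 8)) FROM `/Root/Database/Table`

    -- 3. Persisting the collected statistics into the shared database
    --    (List element types missing in the log capture, not reconstructed):
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_types AS List;
    DECLARE $column_tags AS List>;
    DECLARE $data AS List;
    $to_struct = ($t) -> {
        RETURN <|
            owner_id: $owner_id,
            local_path_id: $local_path_id,
            stat_type: $t.0,
            column_tag: $t.1,
            data: $t.2,
        |>;
    };
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    SELECT owner_id, local_path_id, stat_type, column_tag, data
    FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct));
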
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3886:3650] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TCacheTestWithoutRealSystemViewPaths::SystemViews >> TCacheTest::Attributes >> TCacheTest::SystemViews >> TCacheTest::List >> TCacheTest::Navigate >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TConsoleTests::TestAuthorization [GOOD] >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore >> TConsoleTests::TestAuthorizationExtSubdomain >> TCacheTest::Recreate >> TCacheTestWithoutRealSystemViewPaths::SystemViews [GOOD] >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> TBackupTests::BackupUuidColumn[Zstd] >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TCacheTest::SystemViews [GOOD] >> TCacheTest::TableSchemaVersion >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> TCacheTest::MigrationLostMessage >> TCacheTest::MigrationCommon >> AnalyzeColumnshard::Analyze [GOOD] >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> TCacheTest::WatchRoot >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::PathBelongsToDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] Test command err: 2026-01-07T17:43:43.389826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.389884Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-07T17:43:43.434023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2026-01-07T17:43:43.890757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.890832Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-07T17:43:43.936007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-07T17:43:43.946528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:43:43.954288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:43:43.955116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2026-01-07T17:43:43.960843Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:214:2199], for# user1@builtin, access# DescribeSchema 2026-01-07T17:43:43.963407Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:220:2205], for# user1@builtin, access# |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::SysLocks [GOOD] >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TableCreation::TableCreationWithAcl >> ScriptExecutionsTest::RestartQueryWithGetOperation >> TableCreation::MultipleTablesCreation >> TCacheTest::MigrationCommit [GOOD] >> TCacheTest::CookiesArePreserved >> TableCreation::SimpleTableCreation >> TCacheTest::CheckSystemViewAccess [GOOD] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TableCreation::ConcurrentTableCreation >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2026-01-07T17:43:43.241004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.241074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 2 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 
FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:44.698837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:43:44.712331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2026-01-07T17:43:43.597861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.597918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... Step5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:44.784077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:43:44.795284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:43:44.796436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2026-01-07T17:43:44.809542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:43:44.809785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2026-01-07T17:43:44.811000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 FAKE_COORDINATOR: Erasing txId 
104 TestModificationResult got TxId: 104, wait until txId: 104 TestModificationResults wait txId: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000042 FAKE_COORDINATOR: Erasing txId 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 104 TestWaitNotification wait txId: 105 2026-01-07T17:43:44.824466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:43:44.824701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 2026-01-07T17:43:44.831371Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:505:2473], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:43:44.831745Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:507:2475], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.835159Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:509:2477], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.837197Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:522:2484], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:43:44.838560Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:531:2487], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:43:44.839574Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:539:2495], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:43:44.839828Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:541:2497], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.840084Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:543:2499], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.840869Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:549:2505], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:43:44.841176Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:551:2507], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] >> 
TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2026-01-07T17:43:43.917898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.917960Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: 
minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 
FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add 
transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 
281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:45.057196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> KqpProxy::InvalidSessionID >> TCacheTest::CookiesArePreserved [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2026-01-07T17:43:43.533711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.533776Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: 
advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: 
minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at 
step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 
FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:44.665864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-07T17:43:44.704352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-07T17:43:44.864670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2026-01-07T17:43:43.516384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.516448Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 
FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ate->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at 
step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: 
advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:45.352568Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-07T17:43:45.358724Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:43:45.362705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2026-01-07T17:43:45.366805Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:430:2410], for# user1@builtin, access# DescribeSchema 2026-01-07T17:43:45.367898Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:436:2416], for# user1@builtin, access# |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:10.543258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:10.662599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:10.670043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:10.670356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:10.670573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:11.220769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:11.369993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:11.370143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:11.413515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:11.503477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:12.413867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:12.414984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:12.415028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:12.415059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:12.415595Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:12.525084Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:13.376899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:17.880694Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:18.013879Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:18.019152Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:18.071822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:18.071976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:18.113793Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:18.117299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:18.555242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.556042Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.556846Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.565142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.565643Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.565791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.566137Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.566337Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.566471Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:18.818439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:18.843200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:18.843350Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:18.875029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:19.079277Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:19.229775Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:19.229926Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:19.269894Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:19.270146Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:19.270423Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:19.270503Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:19.270566Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:19.270625Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:19.270689Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:19.270750Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:19.271421Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:19.334749Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:19.334875Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2188:2604], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:19.363431Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2196:2610] 2026-01-07T17:41:19.399782Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2247:2650] 2026-01-07T17:41:19.400896Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2247:2650], schemeshard id = 72075186224037897 2026-01-07T17:41:19.420571Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:19.485926Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Describe result: PathErrorUnknown 2026-01-07T17:41:19.486006Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Creating table 2026-01-07T17:41:19.486100Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:19.543315Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2347:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:19.557113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:19.576087Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:19.576295Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:19.592347Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:19.802269Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:20.285953Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:20.361799Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:20.361901Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2274:2661] Owner: [2:2273:2660]. Column diff is empty, finishing 2026-01-07T17:41:21.087055Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... default not found or you don't have access permissions } 2026-01-07T17:41:51.234384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4183:3738], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:51.234534Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:41:51.241170Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4200:3743], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:51.244371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:41:51.295798Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4182:3737], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:41:51.433325Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4269:3785], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:51.470929Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4268:3784] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:41:51.992041Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4290:3798]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:41:51.992363Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:41:51.992450Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4292:3800] 2026-01-07T17:41:51.992526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4292:3800] 2026-01-07T17:41:51.992949Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4293:3801] 2026-01-07T17:41:51.993104Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4293:3801], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:51.993193Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:41:51.993319Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4292:3800], server id = [2:4293:3801], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:51.993419Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:41:51.993504Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4290:3798], StatRequests.size() = 1 2026-01-07T17:41:53.194601Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:42:58.123438Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrsxzd91md2cw286k50kr7", SessionId: ydb://session/3?node_id=2&id=YzIyM2IzODAtMTE5Y2ZmZS03YmZhZDYwMi05YmY0ZjI4YQ==, Slow query, duration: 66.895800s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:42:58.125121Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4155:3723], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:42:58.125384Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4155:3723], Start read next stream part 2026-01-07T17:42:58.125871Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-07T17:42:58.126013Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4145:2474], 
ActorId: [2:4401:3900], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:42:58.129637Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4155:3723], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:42:58.129720Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4155:3723], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2YzYWFiYTctOTBhMDdlMTYtMzA3MjFiYTgtMTgxZGVjNzA=, TxId: 2026-01-07T17:42:58.130399Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:42:58.130527Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], Start read next stream part 2026-01-07T17:42:58.240839Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4427:3922]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:42:58.241164Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:42:58.241222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4427:3922], StatRequests.size() = 1 2026-01-07T17:43:42.492623Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrvzajaqyfk4ne27a0dg3e", SessionId: ydb://session/3?node_id=2&id=NTZjZGExMDctZGFhMjA2YzctNWMyMDRlNmMtYTE4N2FmNTA=, Slow query, duration: 44.358059s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:43:42.495688Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:42.495829Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], Start read next stream part 2026-01-07T17:43:42.496214Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33170, txId: 18446744073709551615] shutting down 2026-01-07T17:43:42.496846Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4515:4005], ActorId: [2:4518:4007], Starting query actor #1 [2:4521:4009] 2026-01-07T17:43:42.496921Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4518:4007], ActorId: [2:4521:4009], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:42.500397Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:42.500468Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4145:2474], ActorId: [2:4401:3900], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTBkNDU4MzItYjk0ZmFhNGMtOTM3MGIyNzQtYTUyZjRjNmQ=, TxId: 2026-01-07T17:43:42.502195Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4518:4007], ActorId: [2:4521:4009], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MmQ1NGQ2OWYtYjU4MTI2NDYtNTkwODU0NjEtODQ4YmJmMmU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:42.591010Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4534:4021]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:42.591342Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:42.591392Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4534:4021], StatRequests.size() = 1 2026-01-07T17:43:42.768645Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4518:4007], ActorId: [2:4521:4009], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmQ1NGQ2OWYtYjU4MTI2NDYtNTkwODU0NjEtODQ4YmJmMmU=, TxId: 2026-01-07T17:43:42.768760Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4518:4007], ActorId: [2:4521:4009], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmQ1NGQ2OWYtYjU4MTI2NDYtNTkwODU0NjEtODQ4YmJmMmU=, TxId: 2026-01-07T17:43:42.769971Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4515:4005], ActorId: [2:4518:4007], Got response [2:4521:4009] SUCCESS 2026-01-07T17:43:42.770421Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:42.822814Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:42.822924Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3186:3442] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupRestoreWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2026-01-07T17:43:43.620443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.620509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... RDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add 
transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 
FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:45.881478Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-07T17:43:45.892160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:43:45.910536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2026-01-07T17:37:08.233101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675091622219395:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:08.233136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:37:08.361609Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:37:08.810607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-07T17:37:08.841951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:37:08.842017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:37:08.914300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:37:09.009548Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592675091622219371:2081] 1767807428229654 != 1767807428229657 2026-01-07T17:37:09.012939Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:37:09.038666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:37:09.211789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003cb6/r3tmp/yandexR4Vyx5.tmp 2026-01-07T17:37:09.211814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003cb6/r3tmp/yandexR4Vyx5.tmp 2026-01-07T17:37:09.211980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003cb6/r3tmp/yandexR4Vyx5.tmp 2026-01-07T17:37:09.212077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:37:09.255199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:37:09.261651Z INFO: TTestServer started on Port 23315 GrpcPort 10016 PQClient connected to localhost:10016 2026-01-07T17:37:09.635855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:37:09.724205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:09.884808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:37:13.234294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675091622219395:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:37:13.234349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:37:13.890784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7592675113097056876:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.890902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.891311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675113097056905:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.891361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675113097056906:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.891496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.899377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:37:13.903711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675113097056939:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.903841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.913039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592675113097056943:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.913239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:37:13.928066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2026-01-07T17:37:13.928355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592675113097056909:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:37:14.137888Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592675113097056968:2649] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:37:14.169092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:14.207953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:14.340069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:37:14.392508Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592675117392024280:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:37:14.393337Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGU1NDY4ZTEtMjkwNTk5OC1jNWE2ODY3OC1iODc0M2E1Nw==, ActorId: [1:7592675113097056872:2331], ActorState: ExecuteState, LegacyTraceId: 01kecrhf4nc8xvvkjttsgpwrnp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:37:14.397888Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592675117392024561:2830] === CheckClustersList. Ok 2026-01-07T17:37:19.828958Z :WriteToTopic_Demo_11_Table INFO: TTopicSdkTestSetup started 2026-01-07T17:37:19.870793Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T17:37:19.905596Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [7 ... 
PendingWrites: 0 2026-01-07T17:43:40.721930Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.750046Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.750075Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.750100Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750121Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750126Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.750142Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.750162Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750175Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750188Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.750202Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.750271Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:43:40.750274Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:43:40.750284Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750288Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750294Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.750299Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.750308Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750315Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.750323Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-07T17:43:40.750347Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.822198Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:43:40.822252Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.822267Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.822288Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.822304Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.850323Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.850324Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.850351Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850360Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850366Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.850374Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.850388Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850390Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850404Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.850405Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.850444Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:43:40.850446Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:43:40.850455Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850458Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850465Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.850469Z node 14 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.850477Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850481Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.850487Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-07T17:43:40.850491Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.922524Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:43:40.922572Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.922586Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.922606Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.922621Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.950707Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.950753Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950767Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.950774Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:43:40.950786Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950797Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950802Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.950811Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.950831Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950843Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-07T17:43:40.950846Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:43:40.950858Z 
node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950870Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.950881Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:43:40.950884Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950894Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:43:40.950894Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950904Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:43:40.950918Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:43:40.950928Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:44.058520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:44.058612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.058647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:44.058682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:44.058718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:44.058780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:44.058847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.058941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:44.059797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:44.060129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:44.244102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.244177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:44.259674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:44.260093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:44.260281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:44.267351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:44.267713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:44.268509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:44.268777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:44.271880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.272068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:44.273317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:44.273394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.273515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:44.273565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.273604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:44.273802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:44.418862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.419940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.420918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.421008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.421090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:45.518982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:45.519054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:43:45.519168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:43:45.519511Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2838], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2026-01-07T17:43:45.521594Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:886:2841], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2026-01-07T17:43:45.521659Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:886:2841], success# 1, error# , multipart# 0, uploadId# (empty maybe) FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:43:45.522584Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:885:2840], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22155 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 49FCACBF-529C-4117-A523-D5F8FA8B9304 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2026-01-07T17:43:45.531011Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2838], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2026-01-07T17:43:45.531777Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:880:2837] 2026-01-07T17:43:45.531933Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:881:2838], sender# [1:880:2837], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22155 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C3A3C776-2D62-4C4B-9CDF-16E3C36CF40A amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-07T17:43:45.537472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2026-01-07T17:43:45.538076Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2838], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-07T17:43:45.538140Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:881:2838], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:45.538324Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:880:2837], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:45.549976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.550056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:45.550249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.550420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 724 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.550518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:45.550751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:45.551366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.551410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:43:45.551561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 
72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.551650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.551701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:45.551754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.551806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:45.551856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:43:45.551906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:45.552094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:45.555841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.556339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.556839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.556901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:45.559394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.559666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.559838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.560031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.560216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2026-01-07T17:43:45.560608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2743] message: TxId: 102 2026-01-07T17:43:45.560835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.561428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:45.561647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:45.562174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:45.568276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:45.568404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:860:2818] TestWaitNotification: OK eventTxId 102 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> ScriptExecutionsTest::RunCheckLeaseStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:44.355543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:44.355615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.355644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:44.355670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:44.355722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:44.355758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:44.355804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.355866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:44.356536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:44.356776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:44.452301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.452377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:44.474949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:44.475383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:44.475587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:44.482619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:44.483169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:44.484050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:44.484392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:44.492096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.492341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:44.493774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:44.493864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.494002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:44.494057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.494108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:44.494328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:44.656180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:43:44.657916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.657989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.658106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.658181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.658245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.658321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7594046678944 2026-01-07T17:43:45.670639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:43:45.670803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:43:45.670959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:45.793129Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:821:2789], attempt# 0 2026-01-07T17:43:45.829549Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:821:2789], sender# [1:820:2788] 2026-01-07T17:43:45.835708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:45.835776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:43:45.837117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:45.837200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:43:45.837906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:11460 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 47A76048-70FD-4715-A76C-54A288DDCF82 amz-sdk-request: attempt=1 content-length: 107 content-md52026-01-07T17:43:45.837976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 : dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-07T17:43:45.840919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:45.841084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:45.841220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:45.841263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:45.841310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:45.841406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:43:45.841702Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:11460 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 02771BD3-8CEC-434A-BF20-08D829E00A19 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-07T17:43:45.855142Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-07T17:43:45.855270Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:820:2788] 2026-01-07T17:43:45.855483Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:821:2789], sender# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-07T17:43:45.856440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11460 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7CD3163C-5729-4C27-B3D9-BC60B0144918 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-07T17:43:45.873065Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-07T17:43:45.873143Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:821:2789], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:45.873322Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:45.888722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.888794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:45.888992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.889103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-07T17:43:45.889223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:45.889281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.889320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:45.889370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:45.889555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:45.891843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.892218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.892302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:45.892411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.892452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.892488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.892518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.892552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:43:45.892617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:43:45.892663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.892706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:45.892735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:45.892850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:45.895064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:45.895116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:805:2774] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:44.354733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:44.354853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.354911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:44.354951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:44.355000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:44.355038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:44.355130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:44.355198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:44.356084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-07T17:43:44.356409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:44.515336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.515445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:44.541595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:44.542063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:44.542244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:44.549176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:44.549555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:44.550368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:44.550684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:44.553897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.554105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:44.555424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:44.555500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.555637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:44.555694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.555740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:44.555928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:44.720041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.721979Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.722089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.722217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.722312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.722377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.722438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2057594046678944 2026-01-07T17:43:45.800778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:43:45.800910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:43:45.801148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:45.821704Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:821:2789], attempt# 0 2026-01-07T17:43:45.851620Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:821:2789], sender# [1:820:2788] 2026-01-07T17:43:45.858209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:45.858330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:43:45.858628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:45.858673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:43:45.859270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.859325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:43:45.860299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:45.860401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:43:45.860439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:43:45.860495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:43:45.860539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:43:45.860627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /metadata.json 
HTTP/1.1 HEADERS: Host: localhost:5829 Accept: */* 2026-01-07T17:43:45.863584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BB30A6DC-EC2D-42DE-B048-909AC11F578F amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-07T17:43:45.865539Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:5829 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7325C471-0DCF-41DE-BE0A-380AC68EFCA9 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-07T17:43:45.884749Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-07T17:43:45.884881Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:820:2788] 2026-01-07T17:43:45.885188Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:821:2789], sender# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5829 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2F99B265-801D-46A6-B057-2722DF53A79C amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2026-01-07T17:43:45.895890Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:821:2789], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2026-01-07T17:43:45.895971Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:821:2789], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:43:45.896344Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:820:2788], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:43:45.926263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:45.926385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:43:45.926617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:45.926750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-07T17:43:45.926838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:45.926908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.926965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:43:45.927025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:43:45.927215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:45.937945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.938369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.938444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:43:45.938591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.938645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.938721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:43:45.938793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.938849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:43:45.938929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:43:45.938975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:43:45.939026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:43:45.939062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:43:45.939218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:43:45.949912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:43:45.949993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:805:2774] TestWaitNotification: OK eventTxId 102 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2026-01-07T17:43:44.702092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.702155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... HARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/compile_cache_queries', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.372314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720667 2026-01-07T17:43:47.372357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720668 2026-01-07T17:43:47.372441Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.372491Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_read_bytes_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.372520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-07T17:43:47.372738Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.372875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-07T17:43:47.373008Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.373111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-07T17:43:47.373168Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at 
schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.373346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-07T17:43:47.373402Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.373523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-07T17:43:47.373629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-07T17:43:47.373672Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.373704Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.373855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-07T17:43:47.373937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-07T17:43:47.373972Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374057Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-07T17:43:47.374196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-07T17:43:47.374251Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374354Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374413Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-07T17:43:47.374454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-07T17:43:47.374513Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374568Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-07T17:43:47.374684Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-07T17:43:47.374767Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-07T17:43:47.374892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-07T17:43:47.374934Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.374981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-07T17:43:47.375032Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.375172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-07T17:43:47.375281Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-07T17:43:47.375374Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 
72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2026-01-07T17:43:47.377257Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-07T17:43:47.377434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:601) 2026-01-07T17:43:47.381216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-07T17:43:47.383368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:43:47.384499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 Forgetting tablet 72075186233409549 2026-01-07T17:43:47.391025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:43:47.393249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:734:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:737:2067] recipient: [2:736:2629] Leader for TabletID 72057594046678944 is [2:738:2630] sender: [2:739:2067] recipient: [2:736:2629] 2026-01-07T17:43:47.463200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:47.463274Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> DataShardReadTableSnapshots::ReadTableDropColumn >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TableCreation::UpdateTableWithAclRollback >> KqpProxy::PassErrroViaSessionActor >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSplitBefore >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> ReadSessionImplTest::SuccessfulInit >> DataShardReadTableSnapshots::ReadTableSnapshot >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> 
ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> ReadOnlyVDisk::TestDiscover >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ReadOnlyVDisk::TestGarbageCollect >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadOnlyVDisk::TestReads >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:41:14.242289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:14.381575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:14.390000Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:41:14.390373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:41:14.390625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:41:14.955728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:15.091125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:15.091276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:15.133462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:15.228792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:41:16.359240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:41:16.360472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:41:16.360525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:41:16.360558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:41:16.361223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:41:16.458015Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:41:17.600343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:41:23.104282Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:41:23.104371Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:41:23.146264Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:41:23.407952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:41:23.408081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:23.461242Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:41:23.466104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:23.694232Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.695115Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.696135Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.696630Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.696732Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.696990Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.697234Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.697369Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.697510Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:41:23.886268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:41:23.923410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:41:23.923552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:41:23.946892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:41:24.200129Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:41:24.265261Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:41:24.265418Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:41:24.303733Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:41:24.305268Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:41:24.305555Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:41:24.305625Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:41:24.305681Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:41:24.305745Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:41:24.305804Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:41:24.305877Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:41:24.306655Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:41:24.479149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2232:2619] 2026-01-07T17:41:24.481990Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:41:24.485812Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:24.485920Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2250:2629], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:41:24.505116Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Describe result: PathErrorUnknown 2026-01-07T17:41:24.505193Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Creating table 2026-01-07T17:41:24.505276Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:41:24.516459Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2305:2671] 2026-01-07T17:41:24.518042Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2305:2671], schemeshard id = 72075186224037897 2026-01-07T17:41:24.555182Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2347:2686], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:41:24.570668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:41:24.648800Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:41:24.649036Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Subscribe on create table tx: 281474976720657 2026-01-07T17:41:24.666803Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:41:24.700466Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:41:25.510540Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:41:25.607997Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:41:25.608082Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2239:2625] Owner: [2:2238:2624]. Column diff is empty, finishing 2026-01-07T17:41:26.247684Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... .cpp:1273: SyncNode(), pipe client id = [2:4337:3820] 2026-01-07T17:41:58.284916Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4338:3821] 2026-01-07T17:41:58.285100Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4338:3821], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:41:58.285190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:41:58.285385Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4337:3820], server id = [2:4338:3821], tablet id = 72075186224037894, status = OK 2026-01-07T17:41:58.285503Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:41:58.285588Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4335:3818], StatRequests.size() = 1 2026-01-07T17:41:59.001665Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:43:05.071745Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrt4dg822wvqmvjy45qg1m", SessionId: ydb://session/3?node_id=2&id=ZTEzYWU3MTktM2EzYjgwZjktYWRiMjBjOWMtMzgzMzJhMTE=, Slow query, duration: 67.257076s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:43:05.072969Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4202:3744], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:05.073148Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4202:3744], Start read next stream part 2026-01-07T17:43:05.073546Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:43:05.075593Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-07T17:43:05.076361Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:43:05.076469Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], Start read next stream part 2026-01-07T17:43:05.077287Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4202:3744], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:05.077352Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4202:3744], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTZiY2NhZjMtYWVjNGZkYmEtMmRlZmM1YTItMTI1OGEyYzg=, TxId: 2026-01-07T17:43:05.187232Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4474:3943]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:05.187447Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:05.187485Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4474:3943], StatRequests.size() = 1 2026-01-07T17:43:47.201590Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrw63m3s0vwxcn00wdsywx", SessionId: ydb://session/3?node_id=2&id=MTA0MjU0NGMtNjY5ODJjNi1kZjEwYzQ1LWVjYzNlOGQ1, Slow query, duration: 42.121566s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:43:47.203893Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:43:47.204035Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], Start read next stream part 2026-01-07T17:43:47.204388Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33160, txId: 18446744073709551615] shutting down 2026-01-07T17:43:47.204732Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4562:4026], ActorId: [2:4563:4027], Starting query actor #1 [2:4564:4028] 
2026-01-07T17:43:47.204806Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4563:4027], ActorId: [2:4564:4028], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:43:47.208799Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4563:4027], ActorId: [2:4564:4028], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Nzc1ZTE1ZWMtNzQxYmQ0YWYtZDc0ZmQxYWMtODkzMWFhODY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:43:47.209928Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:43:47.209994Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4192:2467], ActorId: [2:4448:3921], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWI0N2I5NDgtZmM0Y2MxMy03MjRhMjg5NC1jMjFmYjRiYg==, TxId: 2026-01-07T17:43:47.294594Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4581:4042]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:47.294952Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:43:47.295030Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4581:4042], StatRequests.size() = 1 2026-01-07T17:43:47.510365Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4563:4027], ActorId: [2:4564:4028], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Nzc1ZTE1ZWMtNzQxYmQ0YWYtZDc0ZmQxYWMtODkzMWFhODY=, TxId: 2026-01-07T17:43:47.510483Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4563:4027], ActorId: [2:4564:4028], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Nzc1ZTE1ZWMtNzQxYmQ0YWYtZDc0ZmQxYWMtODkzMWFhODY=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:43:47.512884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4597:4050] 2026-01-07T17:43:47.513682Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_IN_PROGRESS 2026-01-07T17:43:47.515233Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4599:4051]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 2
FastCheckInFlight: 1
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 0
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 33], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 34], [OwnerId: 72075186224037897, LocalPathId: 33]
ForceTraversals: 1
    1970-01-01T00:00:04Z
NavigatePathId: 
ForceTraversalOperationId: operationId
  CreatedAt: 1970-01-01T00:00:04.039012Z
, ReplyToActorId: [1:1262:3035]
, Types: 1
, Tables size: 1
, Tables: 
    Table[0] PathId: [OwnerId: 72075186224037897, LocalPathId: 34]
        Status: AnalyzeStarted
        AnalyzedShards size: 0
        ColumnTags: 1,2
TraversalStartTime: 2026-01-07T17:41:57Z
TraversalDatabase: 
TraversalPathId: [OwnerId: 72075186224037897, LocalPathId: 34]
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... 2026-01-07T17:43:47.516855Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4562:4026], ActorId: [2:4563:4027], Got response [2:4564:4028] SUCCESS 2026-01-07T17:43:47.517236Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:43:47.559101Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:43:47.559206Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:1262:3035] 2026-01-07T17:43:47.561308Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4603:4054] 2026-01-07T17:43:47.562244Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-07T17:43:51.002883Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.002916Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.002940Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.014532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:51.017176Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:51.017259Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.018675Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.018701Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.018721Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.033173Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:51.037165Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:43:51.037246Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.042024Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.042052Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.042074Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.045300Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-07T17:43:51.045375Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.045409Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.045557Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-07T17:43:51.050046Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.050071Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.050091Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.051575Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:43:51.051623Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.051648Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.051724Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-07T17:43:51.058324Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:43:51.058363Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:43:51.058405Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.068269Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:51.077404Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:51.252054Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:43:51.252797Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:43:51.253202Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-07T17:43:51.257964Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-07T17:43:51.265067Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:43:51.265276Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:43:51.265312Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:43:51.265333Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-07T17:43:51.265365Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-07T17:43:51.265387Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-07T17:43:51.265405Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-07T17:43:51.265424Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-07T17:43:51.265458Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-07T17:43:51.265477Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-07T17:43:51.265494Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-07T17:43:51.265536Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-07T17:43:51.265559Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-07T17:43:51.265576Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-07T17:43:51.265591Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-07T17:43:51.265606Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-07T17:43:51.265674Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-07T17:43:51.265695Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-07T17:43:51.265712Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-07T17:43:51.265732Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-07T17:43:51.265748Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-07T17:43:51.265769Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-07T17:43:51.265786Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-07T17:43:51.265803Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-07T17:43:51.265831Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-07T17:43:51.265858Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-07T17:43:51.265877Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-07T17:43:51.265903Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-07T17:43:51.265926Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-07T17:43:51.265946Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-07T17:43:51.265964Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-07T17:43:51.265982Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-07T17:43:51.266059Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-07T17:43:51.266092Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-07T17:43:51.266112Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-07T17:43:51.266131Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-07T17:43:51.266147Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-07T17:43:51.266168Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-07T17:43:51.266183Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-07T17:43:51.266205Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-07T17:43:51.266220Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-07T17:43:51.266241Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-07T17:43:51.266259Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-07T17:43:51.266286Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-07T17:43:51.266310Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-07T17:43:51.266326Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-07T17:43:51.266343Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-07T17:43:51.266360Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-07T17:43:51.266374Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-07T17:43:51.266416Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-07T17:43:51.266493Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-07T17:43:51.270669Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-07T17:43:51.272016Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-07T17:43:51.272061Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-07T17:43:51.272086Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-07T17:43:51.272108Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-07T17:43:51.272132Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-07T17:43:51.272154Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-07T17:43:51.272171Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-07T17:43:51.272190Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-07T17:43:51.272227Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-07T17:43:51.272245Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-07T17:43:51.272266Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-07T17:43:51.272283Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-07T17:43:51.272299Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-07T17:43:51.272318Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-07T17:43:51.272347Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-07T17:43:51.272370Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-07T17:43:51.272413Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-07T17:43:51.272430Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-07T17:43:51.272470Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-07T17:43:51.272493Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-07T17:43:51.272508Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-07T17:43:51.272522Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-07T17:43:51.272537Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-07T17:43:51.272555Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-07T17:43:51.272590Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-07T17:43:51.272615Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-07T17:43:51.272632Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-07T17:43:51.272661Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-07T17:43:51.272680Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-07T17:43:51.272707Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-07T17:43:51.272733Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-07T17:43:51.272756Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-07T17:43:51.272830Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-07T17:43:51.272849Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-07T17:43:51.272864Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-07T17:43:51.272883Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-07T17:43:51.272898Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-07T17:43:51.272916Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-07T17:43:51.272956Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-07T17:43:51.272971Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-07T17:43:51.272998Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-07T17:43:51.273027Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-07T17:43:51.273043Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-07T17:43:51.273062Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-07T17:43:51.273081Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-07T17:43:51.273102Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-07T17:43:51.273135Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-07T17:43:51.273157Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-07T17:43:51.273176Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-07T17:43:51.273194Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-07T17:43:51.273256Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-07T17:43:51.273426Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-07T17:43:51.275135Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.275162Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.275190Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.284556Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:43:51.293211Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:51.293589Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.297465Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:43:51.400400Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.401135Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:43:51.401207Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:43:51.401250Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:43:51.401324Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:43:51.605305Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-07T17:43:51.706290Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:43:51.706467Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:43:51.706705Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:43:51.708045Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.708071Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.708094Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:43:51.717305Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:43:51.719059Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:43:51.719275Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.719807Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:43:51.821099Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:43:51.821327Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:43:51.821385Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:43:51.821427Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:43:51.821499Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-07T17:43:51.821595Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:43:51.822616Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:43:51.822711Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-07T17:43:51.822915Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |90.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> ReadOnlyVDisk::TestStorageLoad |90.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> TableCreation::TableCreationWithAcl [GOOD] >> TableCreation::UpdateTableWithAclModification |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2026-01-07T17:42:31.898395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:31.898463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:32.118665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:33.193314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:33.345687Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.346195Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-01-07T17:42:33.346801Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17471368510640226151 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.392432Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.392992Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.393252Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16581691150694801095 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.490767Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.491192Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.491447Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 382476272470552259 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.494343Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:42:33.587134Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.587644Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.587946Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpGuo5MZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8518871173247248389 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceIn ... 
rCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:42.825201Z node 143 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:43:42.825765Z node 143 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:43:42.825996Z node 143 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13980938198838640692 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:42.880526Z node 138 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:43:42.881101Z node 138 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:43:42.881298Z node 138 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4411386273957862499 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:42.884682Z node 138 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:43:42.926231Z node 142 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:43:42.926690Z node 142 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:43:42.926890Z node 142 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018c5/r3tmp/tmpdIPKuB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5551551583265581197 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:43.464694Z node 136 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.464826Z node 136 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:43.497110Z node 136 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. 
EnableColumnStatistics=false 2026-01-07T17:43:47.080887Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:47.081015Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:47.196698Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:51.389242Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:51.389334Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:51.562553Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> DataShardReadTableSnapshots::ReadTableUUID >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental >> KqpWorkload::STOCK [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup |90.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> ReadOnlyVDisk::TestSync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 4562065724182496854 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-07T17:43:52.975813Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-07T17:43:53.289058Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-07T17:43:53.290405Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-07T17:43:53.537883Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-07T17:43:53.538793Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-07T17:43:53.539325Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# 
[1:5330:711] 2026-01-07T17:43:53.539546Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [a27a6dc9fd718dc2] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-07T17:43:53.539806Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.444 sample PartId# [1:1:5:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.444 sample PartId# [1:1:5:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.444 sample PartId# [1:1:5:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.444 sample PartId# [1:1:5:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.445 sample PartId# [1:1:5:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.445 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 1.462 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.505 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.015 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.059 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 2.226 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.244 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.255 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.267 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.287 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 2.402 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { 
OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 32590, MsgBus: 26952 2026-01-07T17:42:53.829408Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676572610542005:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:53.829452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:53.912194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:54.207796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-07T17:42:54.336348Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676572610541975:2081] 1767807773817113 != 1767807773817116 2026-01-07T17:42:54.349946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:54.350064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:54.352584Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:54.363932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:54.457963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:42:54.477684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:54.477711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:54.477718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:54.478108Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:54.869259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:55.078514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:57.389106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676589790412057:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.389231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.391546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676589790412067:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.391659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:57.690310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:57.843609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:58.686732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:59.002694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676572610542005:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:59.005574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:42:59.167164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676598380350557:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.167259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.167715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676598380350563:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.168007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676598380350562:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.168039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:42:59.173891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:42:59.190760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T17:42:59.191544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676598380350566:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:42:59.342031Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676598380350617:5085] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:09.178898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:09.178936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.418015s took: 0.419614s took: 0.424133s took: 0.426064s took: 0.432383s took: 0.432644s took: 0.435234s took: 0.436520s took: 0.441109s took: 0.441266s 2026-01-07T17:43:42.513010Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976716086; took: 6.658083s took: 6.658776s 2026-01-07T17:43:42.548325Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7592676783063953538:4991], Table: `/Root/stock` ([72057594046644480:38:1]), SessionActorId: [1:7592676752999181222:4991]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7592676783063953538:4991].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:43:42.548764Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976716091; 2026-01-07T17:43:42.549790Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592676783063952830:4991], SessionActorId: [1:7592676752999181222:4991], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7592676752999181222:4991]. 2026-01-07T17:43:42.550136Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MzZhYWU5YmUtYzY2OGMyNTQtZTY4MzI1YjgtYmU3OWQ4NjU=, ActorId: [1:7592676752999181222:4991], ActorState: ExecuteState, LegacyTraceId: 01kecrx4n14mag9c6vppej1y5y, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7592676783063953478:4991] from: [1:7592676783063952830:4991] trace_id# 2026-01-07T17:43:42.550260Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592676783063953478:4991] TxId: 281474976716086. Ctx: { TraceId: 01kecrx4n14mag9c6vppej1y5y, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzZhYWU5YmUtYzY2OGMyNTQtZTY4MzI1YjgtYmU3OWQ4NjU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:43:42.550580Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzZhYWU5YmUtYzY2OGMyNTQtZTY4MzI1YjgtYmU3OWQ4NjU=, ActorId: [1:7592676752999181222:4991], ActorState: ExecuteState, LegacyTraceId: 01kecrx4n14mag9c6vppej1y5y, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/stock`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:43:42.553359Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;st ... abletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-07T17:43:54.565875Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-07T17:43:54.565895Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2026-01-07T17:43:54.565910Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2026-01-07T17:43:54.565927Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2026-01-07T17:43:54.565943Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-07T17:43:54.565959Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-07T17:43:54.565977Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2026-01-07T17:43:54.565998Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-07T17:43:54.566024Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-07T17:43:54.569964Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-07T17:43:54.569996Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2026-01-07T17:43:54.570020Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2026-01-07T17:43:54.578953Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2026-01-07T17:43:54.578995Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-07T17:43:54.579015Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:43:54.579033Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037909 not found 2026-01-07T17:43:54.579052Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-07T17:43:54.579069Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:43:54.579085Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-07T17:43:54.579101Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-07T17:43:54.599120Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-07T17:43:54.603583Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2026-01-07T17:43:54.603641Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2026-01-07T17:43:54.603663Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-07T17:43:54.603690Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-07T17:43:54.603723Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-07T17:43:54.603742Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:43:54.603779Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:43:54.606415Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-07T17:43:54.820592Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2026-01-07T17:43:54.820657Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2026-01-07T17:43:54.820678Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2026-01-07T17:43:54.820697Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2026-01-07T17:43:54.820712Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2026-01-07T17:43:54.820726Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2026-01-07T17:43:54.820739Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2026-01-07T17:43:54.820752Z node 1 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2026-01-07T17:43:54.820765Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2026-01-07T17:43:54.820778Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2026-01-07T17:43:54.820792Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2026-01-07T17:43:54.820804Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2026-01-07T17:43:54.820819Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2026-01-07T17:43:54.820832Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2026-01-07T17:43:54.820846Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2026-01-07T17:43:54.820862Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2026-01-07T17:43:54.820875Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2026-01-07T17:43:54.820889Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2026-01-07T17:43:54.820904Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2026-01-07T17:43:54.820919Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2026-01-07T17:43:54.820951Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2026-01-07T17:43:54.820970Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2026-01-07T17:43:54.820986Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2026-01-07T17:43:54.821006Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2026-01-07T17:43:54.821030Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2026-01-07T17:43:54.821046Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2026-01-07T17:43:54.821062Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2026-01-07T17:43:54.821076Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037940 not found 2026-01-07T17:43:54.821092Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2026-01-07T17:43:54.821107Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2026-01-07T17:43:54.821122Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2026-01-07T17:43:54.821136Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2026-01-07T17:43:54.821150Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2026-01-07T17:43:54.821168Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2026-01-07T17:43:54.821185Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2026-01-07T17:43:54.821202Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2026-01-07T17:43:54.821218Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2026-01-07T17:43:54.821234Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2026-01-07T17:43:54.821251Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2026-01-07T17:43:54.821273Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain |90.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> IncrementalBackup::SimpleBackupRestoreWithIndex [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TableCreation::UpdateTableWithAclRollback [GOOD] >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/engine/ut/ydb-core-engine-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> ReadOnlyVDisk::TestWrites |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] Test command err: 2026-01-07T17:43:49.467685Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676811328966252:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:49.474197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:49.906630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:49.906714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:50.034245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676811328966039:2081] 1767807829400643 != 1767807829400646 2026-01-07T17:43:50.112300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:50.176366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:50.399927Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:50.798644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:50.798672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:50.798680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:50.798774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:50.978573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:53.834772Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:53.840596Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:53.840633Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:53.840648Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:53.843889Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Describe result: PathErrorUnknown 2026-01-07T17:43:53.843916Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Creating table 2026-01-07T17:43:53.843955Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Describe result: PathErrorUnknown 2026-01-07T17:43:53.843963Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Creating table 2026-01-07T17:43:53.843991Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:53.844007Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:53.844116Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Describe result: PathErrorUnknown 2026-01-07T17:43:53.844121Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Creating table 2026-01-07T17:43:53.844132Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:53.849233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:53.851007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:53.853681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:53.859765Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:53.859810Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:53.860089Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:53.860101Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:53.862326Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:53.862353Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:53.864134Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:53.864143Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:53.868320Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. 
Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:54.011741Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:54.086570Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:54.086619Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Column diff is empty, finishing 2026-01-07T17:43:54.090986Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-07T17:43:54.092054Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676828508836059:2497] Owner: [1:7592676828508836057:2495]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:54.097827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:54.098862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:54.100668Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:54.100685Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7592676828508836060:2498] Owner: [1:7592676828508836057:2495]. Successful alter request: ExecComplete 2026-01-07T17:43:54.178189Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-07T17:43:54.178281Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. Column diff is empty, finishing 2026-01-07T17:43:54.178395Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:54.179426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:54.181722Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676828508836058:2496] Owner: [1:7592676828508836057:2495]. TEvProposeTransactionStatus: { Status: 48 ... OXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7592676858573608065:2969], ActorId: [1:7592676858573608066:2970], TraceId: ExecutionId: d91df715-c1add248-f98b74a1-2f695bd, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-07T17:44:00.187334Z node 1 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7592676858573608062:2968], ActorId: [1:7592676858573608065:2969], TraceId: ExecutionId: d91df715-c1add248-f98b74a1-2f695bd, RequestDatabase: /dc-1, Got response [1:7592676858573608066:2970] SUCCESS 2026-01-07T17:44:00.187382Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7592676858573608061:2967] ActorId: [1:7592676858573608062:2968] Database: /dc-1 ExecutionId: d91df715-c1add248-f98b74a1-2f695bd. Extracted script execution operation [1:7592676858573608066:2970], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [1:7592676832803803641:2682], LeaseGeneration: 0 2026-01-07T17:44:00.187400Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7592676858573608061:2967] ActorId: [1:7592676858573608062:2968] Database: /dc-1 ExecutionId: d91df715-c1add248-f98b74a1-2f695bd. Reply success 2026-01-07T17:44:00.188762Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=NDE5YWJmOWQtZmZjZDk3ZS01MDM3NDQ3Yy1iZmVjMWM5Mw==, workerId: [1:7592676858573608074:2527], local sessions count: 0 2026-01-07T17:44:00.246176Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrxvznexhwa7rfey2vg4ve", Request has 18444976265869.305472s seconds to be completed 2026-01-07T17:44:00.248705Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrxvznexhwa7rfey2vg4ve", Created new session, sessionId: ydb://session/3?node_id=1&id=YzE1MWQ1ZjEtNGM1MzkxNmMtYmU0NTY2YjMtZTdkNWRlZA==, workerId: [1:7592676858573608119:2542], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T17:44:00.248874Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrxvznexhwa7rfey2vg4ve 2026-01-07T17:44:00.270396Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrxw0dahanc0jrc70996kt, Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=YzE1MWQ1ZjEtNGM1MzkxNmMtYmU0NTY2YjMtZTdkNWRlZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 44, targetId: [1:7592676858573608119:2542] 2026-01-07T17:44:00.270435Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 44 timeout: 600.000000s actor id: [1:7592676858573608122:2987] 2026-01-07T17:44:00.306632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:00.312274Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrxw0dahanc0jrc70996kt", Forwarded response to sender actor, requestId: 44, sender: [1:7592676858573608121:2543], selfId: [1:7592676811328966263:2256], source: [1:7592676858573608119:2542] --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:00.327908Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Describe result: PathErrorUnknown 2026-01-07T17:44:00.327933Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Creating table 2026-01-07T17:44:00.327975Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-07T17:44:00.330878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:00.334122Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710688 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-07T17:44:00.334155Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Subscribe on create table tx: 281474976710688 2026-01-07T17:44:00.335634Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Subscribe on tx: 281474976710688 registered 2026-01-07T17:44:00.364370Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Request: create. Transaction completed: 281474976710688. Doublechecking... 2026-01-07T17:44:00.431585Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:00.431628Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7592676858573608133:2994] Owner: [1:7592676858573608132:2993]. 
Column diff is empty, finishing 2026-01-07T17:44:00.464161Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrxw6f5h4t4fbanawr8rhk", Request has 18444976265869.087483s seconds to be completed 2026-01-07T17:44:00.466138Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrxw6f5h4t4fbanawr8rhk", Created new session, sessionId: ydb://session/3?node_id=1&id=ZWY3NmMxZjMtOTkxMzVlYmYtNTIyNDkzMi0xNWY4YWYzNQ==, workerId: [1:7592676858573608263:2552], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:00.466306Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrxw6f5h4t4fbanawr8rhk 2026-01-07T17:44:00.504306Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:00.504354Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-07T17:44:00.504426Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-07T17:44:00.506140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:44:00.507304Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710689 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:44:00.507329Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Subscribe on create table tx: 281474976710689 2026-01-07T17:44:00.508337Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Subscribe on tx: 281474976710689 registered 2026-01-07T17:44:00.517877Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=ZWY3NmMxZjMtOTkxMzVlYmYtNTIyNDkzMi0xNWY4YWYzNQ==, workerId: [1:7592676858573608263:2552], local sessions count: 1 2026-01-07T17:44:00.530986Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Request: alter. Transaction completed: 281474976710689. Doublechecking... 2026-01-07T17:44:00.619954Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:00.620025Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. 
Column diff is empty, finishing 2026-01-07T17:44:00.620120Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-07T17:44:00.621110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:00.622868Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710690 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:44:00.622882Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [1:7592676858573608269:3099] Owner: [1:7592676858573608268:3098]. Successful alter request: ExecComplete 2026-01-07T17:44:00.651574Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecrxwcbbjn5rg6zmpkz1zyq", Request has 18444976265868.900075s seconds to be completed 2026-01-07T17:44:00.653815Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecrxwcbbjn5rg6zmpkz1zyq", Created new session, sessionId: ydb://session/3?node_id=1&id=OTc1Zjg3NzUtY2M5YjRmZjMtNjU4OGUwZGEtYmY4YjVjZTY=, workerId: [1:7592676858573608314:2561], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:00.726501Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecrxwcbbjn5rg6zmpkz1zyq 2026-01-07T17:44:00.757614Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YzE1MWQ1ZjEtNGM1MzkxNmMtYmU0NTY2YjMtZTdkNWRlZA==, workerId: [1:7592676858573608119:2542], local sessions count: 1 2026-01-07T17:44:00.760515Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=OTc1Zjg3NzUtY2M5YjRmZjMtNjU4OGUwZGEtYmY4YjVjZTY=, workerId: [1:7592676858573608314:2561], local sessions count: 0 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:17.209993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:17.210093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:17.210119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:17.210144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:17.210182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:17.210211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:17.210261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:17.210309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:17.210894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:17.211118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:17.275387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:17.275447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:17.290721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:17.291067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:17.291263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:17.298627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:17.299109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:17.299898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:17.300230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:17.303666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:17.303876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:17.305299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:17.305367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:17.305515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:17.305572Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:17.305618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:17.305819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:17.440713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.441951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.442001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.442078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.442123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.442160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:17.442194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30279 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 22B2CAC2-0992-40A2-8BC9-3C7F00FF5B9D amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2026-01-07T17:44:00.797771Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3861:5822], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2026-01-07T17:44:00.798168Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3860:5821] 2026-01-07T17:44:00.798297Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3861:5822], sender# [1:3860:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30279 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2A5F99D3-D3C6-4FC8-83AB-68C892F33EE8 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2026-01-07T17:44:00.803245Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3861:5822], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-07T17:44:00.803344Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3861:5822], success# 1, error# , multipart# 1, uploadId# 1 2026-01-07T17:44:00.812262Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3861:5822], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:30279 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
77DD67CC-20B6-4FE4-B3D0-58F477A2E7FB amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2026-01-07T17:44:00.823084Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3861:5822], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2026-01-07T17:44:00.823654Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3860:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:44:00.842479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:00.842577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:44:00.842748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:00.842859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:00.842952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:00.843009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:00.843059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:44:00.843105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:44:00.843286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:00.850337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:00.851070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:00.851139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:44:00.851255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:00.851288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:00.851327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:00.851361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:00.851394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:44:00.851483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:44:00.851545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:00.851583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:44:00.851629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:44:00.851754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:00.855801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:44:00.855865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3846:5808] TestWaitNotification: OK eventTxId 102 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |90.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |90.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::NoLocalSessionExecution ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:52.620811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:52.763389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:52.782429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:52.783010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:52.783077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:53.120428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:53.120579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:53.222466Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807829383507 != 1767807829383511 2026-01-07T17:43:53.238834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:53.289404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:53.412333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:53.759166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:53.775766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:53.887634Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:43:53.887720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:43:53.887849Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:43:54.134105Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:43:54.134205Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:43:54.134854Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:43:54.135413Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:43:54.135892Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:43:54.136064Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:43:54.136290Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:43:54.136586Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:43:54.138377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:54.139642Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:43:54.139733Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:43:54.192012Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:54.196326Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:54.196646Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:43:54.196949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:43:54.249832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:54.250691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:43:54.250809Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:43:54.252462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:43:54.252542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:43:54.252641Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:43:54.253025Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:43:54.253166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:43:54.253263Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:43:54.264597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:43:54.306420Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:43:54.306636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:43:54.306734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:43:54.306772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:43:54.306799Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:43:54.306831Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:43:54.307072Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:54.307110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:54.307427Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:43:54.307525Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:43:54.307589Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:43:54.307630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:43:54.307673Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:43:54.307712Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:43:54.307748Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:43:54.307780Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:43:54.307824Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:43:54.307947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:54.307981Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:54.308045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:43:54.308461Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:43:54.308509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:43:54.308633Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:43:54.308919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:43:54.309729Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:43:54.309861Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... g event TEvTxProcessing::TEvStreamClearancePending 2026-01-07T17:44:02.427486Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2026-01-07T17:44:02.427538Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-07T17:44:02.427679Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:02.427702Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:02.427739Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:44:02.427765Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:02.427799Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2026-01-07T17:44:02.427828Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2026-01-07T17:44:02.427861Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2026-01-07T17:44:02.427887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-07T17:44:02.427913Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-07T17:44:02.427938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2026-01-07T17:44:02.427967Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-07T17:44:02.428166Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2026-01-07T17:44:02.428190Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:02.428214Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 
72075186224037888 2026-01-07T17:44:02.428237Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:44:02.428257Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:44:02.428296Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:44:02.428678Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:975:2831], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:02.428739Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:02.428848Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2026-01-07T17:44:02.428888Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-07T17:44:02.429238Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:958:2816], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-07T17:44:02.429294Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:959:2816] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:02.429350Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:959:2816] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-07T17:44:02.429422Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-07T17:44:02.429528Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2026-01-07T17:44:02.429584Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-07T17:44:02.429784Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2026-01-07T17:44:02.429813Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-07T17:44:02.429847Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:959:2816] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-07T17:44:02.429915Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:44:02.429949Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715659, at: 72075186224037888 2026-01-07T17:44:02.430047Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:02.430116Z node 2 :TX_DATASHARD TRACE: 
datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:02.430167Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:44:02.430217Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:02.430250Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2026-01-07T17:44:02.430282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-07T17:44:02.430322Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2026-01-07T17:44:02.430371Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-07T17:44:02.430405Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2026-01-07T17:44:02.430444Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:44:02.430473Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-07T17:44:02.430508Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2026-01-07T17:44:02.430537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:44:02.430580Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:44:02.430627Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:44:02.430679Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-07T17:44:02.430708Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:44:02.430741Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2026-01-07T17:44:02.430770Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:44:02.430801Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-07T17:44:02.430839Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:44:02.430869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:44:02.430925Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:44:02.430959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-07T17:44:02.431053Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2026-01-07T17:44:02.431131Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2026-01-07T17:44:02.431213Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:44:02.431477Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:886:2765], Recipient [2:959:2816]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 388 } } CommitVersion { Step: 0 TxId: 281474976715659 } 2026-01-07T17:44:02.431524Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1922: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2026-01-07T17:44:02.431577Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2920: [ReadTable [2:959:2816] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2026-01-07T17:44:02.431900Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 2500 TxId: 281474976715658 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TableCreation::UpdateTableWithAclModification [GOOD] >> TableCreation::UpdateTableAcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 17707819855800533561 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: 
TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-07T17:43:53.511560Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-07T17:43:53.523058Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-07T17:43:54.578137Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:54.579298Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-07T17:43:55.082957Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:55.083183Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-07T17:43:55.460730Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:55.462054Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:55.463246Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:55.463570Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [ac2e1a7406f284db] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# 
UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-07T17:43:55.470538Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.688 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.688 sample PartId# [1:1:4:0:0:131072:5] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.689 sample PartId# [1:1:4:0:0:131072:4] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.689 sample PartId# [1:1:4:0:0:131072:3] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.689 sample PartId# [1:1:4:0:0:131072:2] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.69 sample PartId# [1:1:4:0:0:131072:1] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.489 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.555 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.023 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 3.08 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.42 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 3.45 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 3.463 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 3.482 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 3.514 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 3.679 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2026-01-07T17:43:56.002596Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:56.002804Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:56.002854Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly 
for vdisk [82000000:1:0:3:0] 2026-01-07T17:43:58.268310Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:58.268542Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:58.268606Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:58.268659Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-07T17:43:58.600133Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:58.600362Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:58.600423Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:58.600478Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] 2026-01-07T17:43:58.600532Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-07T17:43:58.932974Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:58.933277Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:58.933338Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:58.933386Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] 2026-01-07T17:43:58.933438Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:43:58.933489Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-07T17:43:59.203879Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:43:59.204140Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:59.204203Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:59.204259Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only 
Sender# [1:5347:726] 2026-01-07T17:43:59.204313Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:43:59.204369Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:43:59.204421Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-07T17:43:59.521446Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:43:59.521551Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:59.521603Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] 2026-01-07T17:43:59.521655Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:43:59.521708Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:43:59.521761Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2026-01-07T17:43:59.892689Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:43:59.892783Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] 2026-01-07T17:43:59.892836Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:43:59.892887Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:43:59.892959Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2026-01-07T17:44:00.315721Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:726] 2026-01-07T17:44:00.315824Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:44:00.315885Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:44:00.315940Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-01-07T17:44:00.740636Z 
5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:733] 2026-01-07T17:44:00.740749Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:44:00.740810Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-07T17:44:01.971735Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:740] 2026-01-07T17:44:01.971832Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-07T17:44:02.502350Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:53.733827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:53.847143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:53.868536Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:53.869128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:53.869189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:54.220723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:54.220879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:54.347095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807829871449 != 1767807829871453 2026-01-07T17:43:54.359179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:54.408877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:54.502156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:54.859414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:54.876017Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:54.993712Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:43:54.993788Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:43:54.993903Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:43:55.172226Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:43:55.172354Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:43:55.173112Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:43:55.173232Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:43:55.173660Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:43:55.173828Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:43:55.174039Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:43:55.174370Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:43:55.176422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:55.178634Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:43:55.178712Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:43:55.222570Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:55.223574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:55.223844Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:43:55.224100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:43:55.273151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:55.274030Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:43:55.274177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:43:55.276016Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:43:55.276102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:43:55.276178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:43:55.276596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:43:55.276742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:43:55.276830Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:43:55.289179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:43:55.322257Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:43:55.322523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:43:55.322643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:43:55.322677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:43:55.322714Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:43:55.322753Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:43:55.323008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:55.323060Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:55.323462Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:43:55.323566Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:43:55.323657Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:43:55.323697Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:43:55.323757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:43:55.323810Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:43:55.323842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:43:55.323879Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:43:55.323919Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:43:55.324038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:55.324076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:55.324136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:43:55.324511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:43:55.324546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:43:55.324700Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:43:55.324947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:43:55.324997Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:43:55.325113Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:44:03.662976Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1196:2986], Recipient [2:1086:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:03.663016Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:03.663096Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2986], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-07T17:44:03.663125Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-07T17:44:03.663164Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:1196:2986] 2026-01-07T17:44:03.663278Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-07T17:44:03.663620Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.663936Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2986], Recipient [2:1169:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-07T17:44:03.663979Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-07T17:44:03.664008Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1196:2986] ShardId# 72075186224037890 2026-01-07T17:44:03.664061Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-07T17:44:03.664143Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2986], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-07T17:44:03.664169Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-07T17:44:03.664480Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1168:2961], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-07T17:44:03.664515Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1169:2961] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.664542Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1196:2986] 2026-01-07T17:44:03.664599Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-07T17:44:03.664670Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.664825Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2986], Recipient [2:1169:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2026-01-07T17:44:03.664853Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-07T17:44:03.664880Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1196:2986] ShardId# 72075186224037890 2026-01-07T17:44:03.664968Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1169:2961] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.016148s execute time: 0.186023s total time: 0.202171s 2026-01-07T17:44:03.665161Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-07T17:44:03.665212Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2026-01-07T17:44:03.665520Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-07T17:44:03.665552Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037890 2026-01-07T17:44:03.665777Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2961], Recipient [2:1083:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 2026-01-07T17:44:03.665975Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1086:2897], Recipient [2:1086:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.666006Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.666049Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:44:03.666081Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:03.666119Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2026-01-07T17:44:03.666148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2026-01-07T17:44:03.666183Z node 2 :TX_DATASHARD TRACE: 
read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2026-01-07T17:44:03.666224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-07T17:44:03.666253Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2026-01-07T17:44:03.666282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2026-01-07T17:44:03.666312Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-07T17:44:03.666342Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2026-01-07T17:44:03.666367Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2026-01-07T17:44:03.666395Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T17:44:03.666423Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2026-01-07T17:44:03.666469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-07T17:44:03.666493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T17:44:03.666518Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2026-01-07T17:44:03.666546Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:44:03.666588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-07T17:44:03.666617Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-07T17:44:03.666648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T17:44:03.666697Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:44:03.666730Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-07T17:44:03.666767Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:44:03.666838Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:44:03.667096Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549569, Sender [2:1169:2961], Recipient [2:1086:2897]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 
2026-01-07T17:44:03.667133Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2026-01-07T17:44:03.667186Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2026-01-07T17:44:03.667242Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2026-01-07T17:44:03.667401Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287431, Sender [2:1169:2961], Recipient [2:1086:2897]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2026-01-07T17:44:03.667434Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2026-01-07T17:44:03.667563Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2961], Recipient [2:1086:2897]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:53.188666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:53.317486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:53.335905Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:53.336483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:53.336554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:53.775920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:53.776063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:53.878945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807829206809 != 1767807829206813 2026-01-07T17:43:53.891318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:53.945818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:54.063687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:54.449950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:54.464590Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:54.572803Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:43:54.572882Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:43:54.573064Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:43:54.711154Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:43:54.711275Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:43:54.711945Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:43:54.712051Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:43:54.712410Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:43:54.712577Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:43:54.712762Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:43:54.713072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:43:54.714882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:54.716118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:43:54.716190Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:43:54.762113Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:54.763261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:54.763594Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:43:54.763852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:43:54.809136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:54.809906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:43:54.810022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:43:54.811847Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:43:54.811936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:43:54.812013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:43:54.812431Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:43:54.812577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:43:54.812677Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:43:54.823878Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:43:54.878784Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:43:54.879033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:43:54.879185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:43:54.879230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:43:54.879274Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:43:54.879311Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:43:54.879569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:54.879623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:54.880035Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:43:54.880147Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:43:54.880220Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:43:54.880260Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:43:54.880314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:43:54.880360Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:43:54.880399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:43:54.880432Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:43:54.880482Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:43:54.880636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:54.880674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:54.880729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:43:54.881157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:43:54.881211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:43:54.881335Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:43:54.881591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:43:54.881643Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:43:54.881756Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... on unit ReadTableScan 2026-01-07T17:44:03.624469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-07T17:44:03.624688Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2026-01-07T17:44:03.624730Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:03.624760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-07T17:44:03.624887Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:44:03.624949Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:44:03.625022Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:44:03.627541Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1074:2891], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:03.627599Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-07T17:44:03.627670Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-07T17:44:03.627973Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.628014Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-07T17:44:03.628099Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:03.628288Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.628384Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-07T17:44:03.628460Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 
2026-01-07T17:44:03.628528Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-07T17:44:03.628921Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.628997Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-07T17:44:03.629050Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:03.629120Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.629184Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-07T17:44:03.629236Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-07T17:44:03.629270Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-07T17:44:03.629510Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.629553Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-07T17:44:03.629597Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:03.629652Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.629710Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-07T17:44:03.629775Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-07T17:44:03.629816Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-07T17:44:03.630082Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.630114Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-07T17:44:03.630153Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:03.630217Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 
2026-01-07T17:44:03.630374Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-07T17:44:03.630420Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-07T17:44:03.630494Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-07T17:44:03.630535Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037888 2026-01-07T17:44:03.630713Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.630769Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.630837Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:44:03.630880Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:03.630952Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2026-01-07T17:44:03.630996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-07T17:44:03.631042Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2026-01-07T17:44:03.631104Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-07T17:44:03.631144Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2026-01-07T17:44:03.631185Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:44:03.631226Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-07T17:44:03.631264Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2026-01-07T17:44:03.631299Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:44:03.631334Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:44:03.631376Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:44:03.631445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-07T17:44:03.631473Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 
2026-01-07T17:44:03.631501Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2026-01-07T17:44:03.631537Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:44:03.631573Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-07T17:44:03.631610Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:44:03.631644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:44:03.631712Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:44:03.631751Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-07T17:44:03.631799Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:44:03.631903Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:44:03.632076Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2026-01-07T17:44:03.632151Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014167s execute time: 0.141965s total time: 0.156132s 2026-01-07T17:44:03.632456Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] >> TableCreation::CreateOldTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:53.406158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:53.675998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:53.697833Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:53.698297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:53.698355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:53.988628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:53.988790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:54.130712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807830075621 != 1767807830075625 2026-01-07T17:43:54.143097Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:54.192602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:54.313237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:54.613377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:54.626781Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:54.736697Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:43:54.736777Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:43:54.736915Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:43:54.906325Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:43:54.906460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:43:54.907299Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:43:54.907411Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:43:54.908002Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:43:54.908177Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:43:54.908538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:43:54.908862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:43:54.911395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:54.913958Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:43:54.914047Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:43:54.951766Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:54.953702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:54.954116Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:43:54.954426Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:43:55.034044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:55.034933Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:43:55.035066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:43:55.037141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:43:55.037243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:43:55.037329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:43:55.037767Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:43:55.037926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:43:55.038029Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:43:55.049489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:43:55.089695Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:43:55.089932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:43:55.090071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:43:55.090132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:43:55.090177Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:43:55.090217Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:43:55.090514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:55.090570Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:55.090949Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:43:55.091051Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:43:55.091159Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:43:55.091201Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:43:55.091262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:43:55.091311Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:43:55.091348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:43:55.091383Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:43:55.091442Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:43:55.091602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:55.091644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:55.091693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:43:55.092137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:43:55.092185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:43:55.092322Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:43:55.092580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:43:55.092634Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:43:55.092740Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ead_table_scan.cpp:662: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:03.739790Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1546:3256], Recipient [2:1277:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2026-01-07T17:44:03.739827Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1277:3043] TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2026-01-07T17:44:03.739859Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1277:3043] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1546:3256] ShardId# 72075186224037896 2026-01-07T17:44:03.739953Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2026-01-07T17:44:03.740025Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1546:3256], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2026-01-07T17:44:03.740062Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1277:3043] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2026-01-07T17:44:03.740529Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1276:3043], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-07T17:44:03.740568Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1277:3043] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:03.740598Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1277:3043] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2026-01-07T17:44:03.740665Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-07T17:44:03.740746Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-07T17:44:03.740887Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: 
StateReadTable, received event# 269287429, Sender [2:1546:3256], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2026-01-07T17:44:03.740919Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1277:3043] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2026-01-07T17:44:03.740965Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1277:3043] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2026-01-07T17:44:03.741044Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037896 2026-01-07T17:44:03.741076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037896 2026-01-07T17:44:03.741187Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1446:3177], Recipient [2:1446:3177]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.741218Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:03.741294Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037896 2026-01-07T17:44:03.741344Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:03.741377Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2026-01-07T17:44:03.741403Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2026-01-07T17:44:03.741434Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2026-01-07T17:44:03.741474Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-07T17:44:03.741505Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2026-01-07T17:44:03.741533Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2026-01-07T17:44:03.741561Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-07T17:44:03.741595Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2026-01-07T17:44:03.741624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2026-01-07T17:44:03.741652Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2026-01-07T17:44:03.741682Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2026-01-07T17:44:03.741723Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-07T17:44:03.741770Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2026-01-07T17:44:03.741804Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2026-01-07T17:44:03.741837Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:44:03.741887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037896 2026-01-07T17:44:03.741916Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037896 has no attached operations 2026-01-07T17:44:03.741937Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037896 2026-01-07T17:44:03.741978Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037896 2026-01-07T17:44:03.742007Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-07T17:44:03.742045Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:44:03.742099Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2026-01-07T17:44:03.742299Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1446:3177], Recipient [2:1277:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 275 } } CommitVersion { Step: 0 TxId: 281474976715664 } 2026-01-07T17:44:03.742329Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1277:3043] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2026-01-07T17:44:03.742376Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1277:3043] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017809s execute time: 0.621300s total time: 0.639109s 2026-01-07T17:44:03.742730Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1081:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.742929Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1189:2977]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.743599Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1194:2979]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.743849Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender 
[2:1277:3043], Recipient [2:1444:3175]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.744117Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1549:3259], Recipient [2:1336:3093]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:44:03.744152Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:44:03.744189Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037893, clientId# [2:1547:3257], serverId# [2:1549:3259], sessionId# [0:0:0] 2026-01-07T17:44:03.744389Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1336:3093]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.744625Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1550:3260], Recipient [2:1339:3095]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:44:03.744656Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:44:03.744688Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1548:3258], serverId# [2:1550:3260], sessionId# [0:0:0] 2026-01-07T17:44:03.744777Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1446:3177]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-07T17:44:03.745002Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1339:3095]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 15849769105289926898 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk 
[82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-07T17:44:03.004765Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-07T17:44:03.010188Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-07T17:44:03.015643Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-07T17:44:03.018946Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-01-07T17:44:03.027126Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-07T17:44:03.030190Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-01-07T17:44:03.033096Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-07T17:44:03.035661Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-07T17:44:05.350985Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.351117Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:44:05.351262Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:44:05.351985Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [9a19ad6f427d9939] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-07T17:44:05.353562Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.374 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.375 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.375 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 
1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.375 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.375 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.376 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.258 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.307 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.437 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.461 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.479 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-07T17:44:05.355127Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.355446Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:44:05.356342Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-07T17:44:05.358155Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.358997Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:44:05.359670Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { Order ... rderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2026-01-07T17:44:05.368863Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.369115Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:44:05.369185Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2026-01-07T17:44:05.371268Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.371441Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2026-01-07T17:44:05.371520Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 
2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2026-01-07T17:44:05.373643Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.373845Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:44:05.373930Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2026-01-07T17:44:05.376283Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2026-01-07T17:44:05.376393Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2026-01-07T17:44:05.376484Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# 
UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2026-01-07T17:44:05.381695Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-07T17:44:05.381914Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5334:713] 2026-01-07T17:44:05.381964Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5341:720] 2026-01-07T17:44:05.382464Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [ef9d6df38d1033ca] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2026-01-07T17:44:05.382743Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: {BPG72@dsproxy_get.cpp:427} Query history GroupId# 2181038080 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.473 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVGet{ TimestampMs# 0.474 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVGetResult{ TimestampMs# 2.813 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 3.398 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVGetResult{ TimestampMs# 3.442 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVGetResult{ TimestampMs# 3.474 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVGetResult{ TimestampMs# 3.505 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVGetResult{ TimestampMs# 3.72 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPut{ TimestampMs# 3.743 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 3.743 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 3.743 sample PartId# 
[1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGetResult{ TimestampMs# 4.357 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# OK } TEvVPut{ TimestampMs# 4.363 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGetResult{ TimestampMs# 4.395 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# OK } TEvVPut{ TimestampMs# 4.398 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.421 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } PutAcceleration{ TimestampMs# 4.424 } PutAcceleration{ TimestampMs# 4.439 } TEvVPutResult{ TimestampMs# 4.631 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPutResult{ TimestampMs# 4.822 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } 2026-01-07T17:44:05.382848Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5334:713] 2026-01-07T17:44:05.382902Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5341:720] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TableCreation::SimpleUpdateTable [GOOD] >> TableCreation::RollbackTableAcl ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-07T17:44:05.913760Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.913790Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.913832Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.914201Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.914777Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.930049Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.931240Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-01-07T17:44:05.932944Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.932971Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.932994Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.933340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.933870Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.934322Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.934527Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.934887Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:44:05.935996Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.936024Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.936070Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.936404Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.937121Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.937259Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.937507Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.938191Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.938493Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.938629Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.938686Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:44:05.939800Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.939832Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.939872Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.940223Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.940633Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.940774Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.941071Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-07T17:44:05.941930Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:05.942264Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:05.942606Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:44:05.942870Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) Getting new event 2026-01-07T17:44:05.961117Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.961176Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:44:05.961220Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:05.961383Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-07T17:44:05.961510Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:44:05.961532Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:44:05.961555Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:05.961715Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-07T17:44:05.961782Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-07T17:44:05.961803Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-07T17:44:05.961832Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:05.961910Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-07T17:44:05.961957Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-07T17:44:05.961983Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-07T17:44:05.962006Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:05.962222Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-07T17:44:05.969611Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.969655Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.969697Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.970327Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.970811Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.971020Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.971316Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-07T17:44:05.972305Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:05.972525Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-07T17:44:05.977300Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:44:05.977554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-07T17:44:05.977730Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.977794Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2026-01-07T17:44:05.977821Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:44:05.977850Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:44:05.977895Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:05.978136Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2026-01-07T17:44:05.978273Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-07T17:44:05.978297Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-07T17:44:05.978329Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-07T17:44:05.978354Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-07T17:44:05.978380Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:05.978528Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2026-01-07T17:44:05.980368Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.980395Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.980425Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.980794Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.981258Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.981435Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.981621Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.982678Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:05.983455Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:05.983809Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-07T17:44:05.983913Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:44:05.984018Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.984049Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:44:05.984072Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-07T17:44:05.984089Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-07T17:44:05.984133Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-01-07T17:44:05.984161Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-07T17:44:05.984319Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2026-01-07T17:44:05.984462Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 |90.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadOnlyVDisk::TestReads [GOOD] >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::PingNotExistedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:54.596664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:54.720085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:54.739379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:54.739988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:54.740073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:55.028543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:55.028690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:55.104385Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807831389863 != 1767807831389867 2026-01-07T17:43:55.120321Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:55.171017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:55.280425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:55.594274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:55.608885Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:55.729589Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T17:43:55.729680Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:43:55.729841Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T17:43:55.923168Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:43:55.923277Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:43:55.923915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:43:55.924009Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:43:55.924346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:43:55.924485Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:43:55.924634Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:43:55.925034Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:43:55.926807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:55.928068Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:43:55.928154Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:43:56.008013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:56.009308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:56.009660Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:43:56.010105Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:43:56.190595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:56.191331Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:43:56.191429Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:43:56.201180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:43:56.201284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:43:56.201365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:43:56.201735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:43:56.201892Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:43:56.201980Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:43:56.213472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:43:56.439255Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:43:56.439476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:43:56.439766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:43:56.439830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:43:56.439875Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:43:56.439913Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:43:56.440183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:56.440235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:43:56.440617Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:43:56.440728Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:43:56.440811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:43:56.440851Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:43:56.440902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:43:56.449301Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:43:56.449402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:43:56.449463Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:43:56.449545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:43:56.449760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:56.449808Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:56.449852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:43:56.450340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T17:43:56.450395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:43:56.450784Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:43:56.451066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:43:56.451116Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:43:56.451221Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... sactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2026-01-07T17:44:05.032085Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-07T17:44:05.032105Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1197:2986] ShardId# 72075186224037890 2026-01-07T17:44:05.032155Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-07T17:44:05.032214Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1197:2986], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-07T17:44:05.032240Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-07T17:44:05.032508Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2879], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-07T17:44:05.032539Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:05.032557Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-07T17:44:05.032587Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:05.032677Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-07T17:44:05.032768Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1197:2986], Recipient [2:1061:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: 
"\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-07T17:44:05.032786Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-07T17:44:05.032803Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1197:2986] ShardId# 72075186224037890 2026-01-07T17:44:05.032836Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-07T17:44:05.032876Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1197:2986], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-07T17:44:05.032891Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-07T17:44:05.033100Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2879], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-07T17:44:05.033120Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-07T17:44:05.033137Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-07T17:44:05.033161Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:05.033210Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-07T17:44:05.033369Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1197:2986], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2026-01-07T17:44:05.033398Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2026-01-07T17:44:05.033417Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2026-01-07T17:44:05.033452Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-07T17:44:05.033472Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037890 2026-01-07T17:44:05.033568Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1102:2911], Recipient [2:1102:2911]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:05.033615Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:44:05.033657Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T17:44:05.033678Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-07T17:44:05.033702Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2026-01-07T17:44:05.033720Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2026-01-07T17:44:05.033741Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2026-01-07T17:44:05.033767Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-07T17:44:05.033787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2026-01-07T17:44:05.033807Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2026-01-07T17:44:05.033829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-01-07T17:44:05.033851Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2026-01-07T17:44:05.033869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2026-01-07T17:44:05.033886Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T17:44:05.033911Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2026-01-07T17:44:05.033948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-07T17:44:05.033970Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T17:44:05.033987Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2026-01-07T17:44:05.034006Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:44:05.034025Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-07T17:44:05.034052Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-07T17:44:05.034074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T17:44:05.034106Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T17:44:05.034125Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037890 on unit 
FinishPropose 2026-01-07T17:44:05.034151Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:44:05.034211Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T17:44:05.034387Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1102:2911], Recipient [2:1061:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 227 } } CommitVersion { Step: 0 TxId: 281474976715662 } 2026-01-07T17:44:05.034409Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2026-01-07T17:44:05.034492Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.015147s execute time: 0.344857s total time: 0.360004s 2026-01-07T17:44:05.034767Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-07T17:44:05.034961Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:1099:2909]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-07T17:44:05.035183Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:1102:2911]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> KqpRm::SingleSnapshotByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] Test command err: 2026-01-07T17:42:33.690182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:33.690254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.888534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.964329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:35.184189Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.184736Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.185485Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9743120360702904941 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.189098Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:42:35.221227Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.221745Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.222007Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13333071673304426863 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.278330Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.278681Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.278901Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmpgZCKcD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9422921710552431507 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.281266Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLo ... 
d: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-07T17:43:04.655273Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:04.655363Z node 55 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:04.762209Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:08.053759Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:08.053853Z node 64 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:08.162125Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:11.227727Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:11.227819Z node 73 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:11.320443Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:12.198687Z node 77 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:43:12.199153Z node 77 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp1nBPrH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:43:12.199420Z node 77 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp1nBPrH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp1nBPrH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2158363383753125464 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:14.541791Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:14.541884Z node 82 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:14.646680Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:15.476669Z node 86 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:43:15.477379Z node 86 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp63MxKt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:43:15.477693Z node 86 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp63MxKt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018ae/r3tmp/tmp63MxKt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11431540381725627715 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:43:51.262360Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:51.262477Z node 91 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:51.446410Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:53.224983Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:53.225094Z node 92 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:53.330258Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:54.827520Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:54.827605Z node 93 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:54.934430Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:57.047729Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:43:57.047815Z node 94 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:57.346604Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:59.218249Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:59.218336Z node 95 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:59.334990Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:00.957626Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:00.957725Z node 96 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:01.082698Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:02.306494Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:02.306608Z node 97 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:02.401079Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:03.716353Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:03.716432Z node 98 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:03.804652Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> KqpRm::NotEnoughMemory >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 10703021649544618372 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] |90.6%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRm::ResourceBrokerNotEnoughResources >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles >> KqpRm::SnapshotSharingByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 14635727353176600786 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-07T17:44:03.953122Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-07T17:44:03.957805Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-07T17:44:03.962740Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-07T17:44:03.965576Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-01-07T17:44:03.974040Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-07T17:44:03.976968Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND 
TEvPut with key [1:1:9:0:0:32768:0] 2026-01-07T17:44:03.980133Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-07T17:44:03.983148Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-07T17:44:05.164700Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-07T17:44:05.164831Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:05.164990Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-07T17:44:05.165891Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [04589423cc077180] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] 
NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-07T17:44:05.167613Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.476 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.476 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.477 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.478 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.478 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.478 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.551 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.62 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.776 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.807 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.832 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-07T17:44:05.169681Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-07T17:44:05.169853Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-07T17:44:05.170955Z 3 00h03m30.110512s :BS_SKELETON 
ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-07T17:44:05.172897Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-07T17:44:05.173804Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-07T17:44:05.174705Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderN ... 
ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2026-01-07T17:44:07.429568Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.429725Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2026-01-07T17:44:07.433199Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-07T17:44:07.434570Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2026-01-07T17:44:07.438903Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2026-01-07T17:44:07.441730Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.441828Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2026-01-07T17:44:07.444644Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.444733Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2026-01-07T17:44:07.447621Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.447713Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2026-01-07T17:44:07.450331Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-07T17:44:07.450563Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2026-01-07T17:44:07.453303Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.453412Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2026-01-07T17:44:07.455872Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-07T17:44:07.456006Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> KqpRm::NodesMembershipByExchanger >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] >> KqpRm::NotEnoughMemory [GOOD] >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> ScriptExecutionsTest::BackgroundOperationRestart >> KqpRm::SingleTask >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2026-01-07T17:44:08.143329Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:08.143820Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003ba2/r3tmp/tmp5mzlNQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:08.144271Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003ba2/r3tmp/tmp5mzlNQ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003ba2/r3tmp/tmp5mzlNQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10199668414945880027 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:08.194107Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.194429Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.214580Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:08.214750Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:08.214814Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:08.214904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:08.215019Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.215054Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:08.215114Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.215143Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:08.215264Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.230062Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.230325Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.230405Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.230820Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.230968Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.231096Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.231128Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.231248Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.231484Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.231526Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.231597Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.232122Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:08.232215Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.232645Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.237146Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.237291Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.237481Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.237524Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:08.237809Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.238004Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.238132Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2026-01-07T17:44:08.653449Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:08.654026Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b41/r3tmp/tmpGzjtPb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:08.661769Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b41/r3tmp/tmpGzjtPb/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b41/r3tmp/tmpGzjtPb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5329530567951380031 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:08.722506Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.722842Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.747098Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:08.747520Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:08.747589Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:08.747656Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:08.747772Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.747806Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:08.747857Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.747881Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:08.748012Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.768666Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.769227Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.769344Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-07T17:44:08.769774Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.769923Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.770057Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.770092Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.770214Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-07T17:44:08.770415Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.770441Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.770524Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.770998Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:08.771093Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.771583Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.772062Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.772195Z node 2 
:KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.772350Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.772387Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:08.772677Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.772829Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.772904Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:08.775759Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:472:2352]) priority=0 resources={0, 1000} 2026-01-07T17:44:08.775854Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.775911Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:08.775955Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.776035Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:472:2352])) 2026-01-07T17:44:08.776245Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2026-01-07T17:44:08.776334Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:472:2352]) priority=0 resources={0, 100000} 2026-01-07T17:44:08.776373Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.776413Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task kqp-1-2-2 (2 by [1:472:2352]) 2026-01-07T17:44:08.776458Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task kqp-1-2-2 (2 by [1:472:2352]) 2026-01-07T17:44:08.776546Z node 1 :KQP_RESOURCE_MANAGER NOTICE: kqp_rm_service.cpp:338: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. 
TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2026-01-07T17:44:08.775669Z } |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> KqpRm::SingleTask [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> KqpRm::SingleSnapshotByExchanger [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] >> IncrementalBackup::ResetVsUpsertColumnStateSerialization >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-07T17:44:07.367761Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.367795Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.367820Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:07.368374Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:07.379091Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:07.389304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.389757Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:07.391309Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:07.401055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:07.401327Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-07T17:44:07.401424Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:07.401510Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:07.401552Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (2-2) 2026-01-07T17:44:07.401593Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:44:07.401619Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:44:07.403309Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.403341Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.403383Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:07.403689Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:07.404104Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:07.404312Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.404569Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-07T17:44:07.405524Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:07.405690Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-07T17:44:07.406177Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:44:07.406391Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-07T17:44:07.406511Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:07.406551Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:44:07.406584Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:07.406728Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-07T17:44:07.406841Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:44:07.406868Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:44:07.406887Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:07.406998Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-07T17:44:07.407046Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-07T17:44:07.407077Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-07T17:44:07.407097Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:07.407172Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-07T17:44:07.407213Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-07T17:44:07.407230Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-07T17:44:07.407248Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:07.407331Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-07T17:44:07.408589Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.408622Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.408654Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:07.408976Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:44:07.409399Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:07.409566Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:07.412633Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2026-01-07T17:44:07.413738Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:44:07.413992Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-07T17:44:07.416596Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-07T17:44:07.416869Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-07T17:44:07.417039Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:07.417090Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:07.417209Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2026-01-07T17:44:07.417321Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:44:07.417344Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:07.417403Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2026-01-07T17:44:07.417450Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:44:07.417467Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-07T17:44:07.417543Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2026-01-07T17:44:07.417623Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-07T17:44:07.417645Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:09.454391Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2026-01-07T17:44:09.542525Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:44:09.542620Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:44:09.542710Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:09.543198Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:09.544087Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:09.544429Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-07T17:44:09.545012Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-07T17:44:09.642971Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-07T17:44:09.644072Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:09.645897Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:44:09.648580Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:44:09.649462Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-07T17:44:09.659955Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-07T17:44:09.660800Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-07T17:44:09.661651Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-07T17:44:09.662477Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-07T17:44:09.670994Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-01-07T17:44:09.671883Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-07T17:44:09.671977Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-07T17:44:09.672218Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-07T17:44:09.675919Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2026-01-07T17:44:09.685465Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.685764Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.685791Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:09.697204Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:44:09.697781Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:09.697977Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.701215Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:09.701949Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-07T17:44:09.712567Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.712640Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.712690Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:09.713134Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:09.713541Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:09.713724Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.717648Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:09.717914Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:09.721026Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:09.721108Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:44:09.723502Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 |90.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2026-01-07T17:44:09.787855Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:09.788384Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3c/r3tmp/tmp2ofmtO/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:09.789124Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3c/r3tmp/tmp2ofmtO/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b3c/r3tmp/tmp2ofmtO/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2812144265377701548 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:09.850403Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.850758Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.865826Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:09.865984Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:09.866045Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:09.866130Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:09.866259Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.866300Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:09.866365Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.866394Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:09.866533Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.885117Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.885381Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.885476Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.885889Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.886031Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.886166Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.886326Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.886448Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.886690Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.886738Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.886811Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.887384Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:09.887465Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.887985Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.888542Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.888647Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.888780Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.888810Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:09.889126Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.889295Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.889403Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:09.891964Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:09.892048Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:09.892108Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:09.892152Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:09.892200Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:472:2352])) 2026-01-07T17:44:09.892422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:09.892632Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:09.892700Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:472:2352])) 2026-01-07T17:44:09.892803Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
|90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |90.6%| [LD] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2026-01-07T17:44:08.077989Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:08.078525Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b42/r3tmp/tmpl5btqb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:08.079132Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b42/r3tmp/tmpl5btqb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b42/r3tmp/tmpl5btqb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9322815547405043970 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:08.129673Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.129931Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:08.147746Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: 
Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:08.147873Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:08.147919Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:08.147968Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:08.148065Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.148097Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:08.148145Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:08.148164Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:08.148289Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.165312Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.165539Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.165607Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.165913Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.166022Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:08.166105Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.166128Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.166205Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.166341Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:08.166356Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.166397Z node 2 
:KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807848 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:08.166757Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:08.166834Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.167240Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.167588Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.167697Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.167798Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:08.167832Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:08.168011Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.168097Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:08.168141Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:08.170409Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:08.170469Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.170508Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:08.170537Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.170602Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:472:2352])) 2026-01-07T17:44:08.170856Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:08.170947Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:08.170986Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.171025Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:08.171059Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:08.171108Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:472:2352])) 2026-01-07T17:44:08.171186Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:08.171400Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:08.171518Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807848 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-07T17:44:08.171779Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:09.294295Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:09.294416Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:09.294475Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300050 (remove task kqp-1-2-1 (1 by [1:472:2352])) 2026-01-07T17:44:09.294527Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100100 2026-01-07T17:44:09.294589Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-07T17:44:09.294637Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:09.294689Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300050 to 0.100100 (remove task kqp-2-1-2 (2 by [1:472:2352])) 2026-01-07T17:44:09.294746Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-07T17:44:09.294924Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.295045Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.295357Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:10.070140Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2026-01-07T17:44:09.332517Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:09.337190Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3d/r3tmp/tmpVrbcOz/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:09.337861Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3d/r3tmp/tmpVrbcOz/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b3d/r3tmp/tmpVrbcOz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12283777607852228334 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:09.404239Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.404593Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.423897Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:09.424050Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:09.424130Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:09.424184Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:09.424302Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.424340Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:09.424395Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.424418Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:09.424561Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.446770Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.447078Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.447208Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.447624Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.447944Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.448106Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.448151Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.448257Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.448459Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.448500Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.448571Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.449135Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:09.449239Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.449837Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.450394Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.450564Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.450771Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.450819Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:09.451189Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.451389Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.451499Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:10.596590Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.596762Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.597881Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:10.884081Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] Test command err: 2026-01-07T17:39:48.882010Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592675780361080381:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:48.882058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:49.109077Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.045734s 2026-01-07T17:39:49.481202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:39:49.523260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:49.523394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:49.555915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:49.704674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:49.712010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:39:49.989070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:50.109704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:50.109722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:50.109729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:50.109803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:50.704334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:39:53.852104Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:39:53.856667Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZmQyZjU4ODMtNTNiYTBkNzItNzYwZWU2OWUtYjU1OGVjODM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZmQyZjU4ODMtNTNiYTBkNzItNzYwZWU2OWUtYjU1OGVjODM= temp_dir_name# e35af870-4f41-bc69-bd26-a08aba653def trace_id# 2026-01-07T17:39:53.867882Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZmQyZjU4ODMtNTNiYTBkNzItNzYwZWU2OWUtYjU1OGVjODM=, ActorId: [1:7592675801835918132:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:39:53.868096Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:39:53.868116Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:39:53.868150Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675801835918130:2328], Start check tables existence, number paths: 2 2026-01-07T17:39:53.873121Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675801835918130:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:39:53.873190Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675801835918130:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:39:53.873217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7592675801835918130:2328], Successfully finished 2026-01-07T17:39:53.873291Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:39:53.881145Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:39:53.885046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592675780361080381:2078];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:39:53.885096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:39:53.979872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:39:54.114476Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592675803804385778:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:54.114525Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:54.165641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:54.165760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:54.182698Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-07T17:39:54.183342Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:39:54.189451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:54.357232Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:54.357341Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:54.365446Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365602Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365702Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365773Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365833Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365900Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.365989Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.366087Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.366168Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:39:54.390576Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:54.735461Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:39:54.757070Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2026-01-07T17:39:55.033928Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:39:55.033973Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:39:55.033983Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:39:55.034071Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:39:55.125863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:39:55.125659Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:39:55.190099Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592675806420721261:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:39:55.190205Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:39:55.338106Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-07T17:39:55.339025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:55.339106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:55.348906Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:39:55.350410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:39:55.495890Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:39:55.496002Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:39:5 ... 
d: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, Delete session: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc= 2026-01-07T17:44:06.137591Z node 13 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TDelayRequestQuery] OwnerId: [13:7592676674362227337:2368], ActorId: [13:7592676880520665743:4494], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, Got response [13:7592676884815633189:4532] UNAVAILABLE 2026-01-07T17:44:06.137761Z node 13 :KQP_WORKLOAD_SERVICE ERROR: pool_handlers_actors.cpp:777: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7592676674362227337:2368], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, failed to delay request UNAVAILABLE, session id: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, issues: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } 2026-01-07T17:44:06.137954Z node 13 :KQP_WORKLOAD_SERVICE WARN: pool_handlers_actors.cpp:323: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7592676674362227337:2368], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Reply continue error UNAVAILABLE to [13:7592676743081710911:4185], session id: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, issues: {
: Error: Failed to put request in queue, code: 2033 subissue: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } } 2026-01-07T17:44:06.138048Z node 13 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:547} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Database disks are without free space pool_id# sample_pool_id trace_id# 2026-01-07T17:44:06.138101Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:553} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Continue request pool_id# sample_pool_id trace_id# 2026-01-07T17:44:06.138259Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Sending CompileQuery request trace_id# 2026-01-07T17:44:06.138312Z node 13 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:342: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Duration: 0.000000s, CpuConsumed: 0.000000s, AdjustCpuQuota: 0 2026-01-07T17:44:06.141400Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc=, ActorId: [13:7592676884815633192:4534], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:44:06.141469Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc=, ActorId: [13:7592676884815633192:4534], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:44:06.141511Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc=, ActorId: [13:7592676884815633192:4534], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:44:06.141556Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc=, ActorId: [13:7592676884815633192:4534], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:44:06.141654Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=MzMyYmE1ZWQtOThhMjIxYjEtNDc0MWRkYmEtZjc0MjMzZDc=, ActorId: [13:7592676884815633192:4534], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-07T17:44:06.257414Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136178862862360 trace_id# 2026-01-07T17:44:06.257503Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], 
ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Sending to Executer span_id_size# 8 trace_id# 2026-01-07T17:44:06.257686Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Created new KQP executer executer_id# [13:7592676884815633209:4185] is_rollback# false trace_id# 2026-01-07T17:44:06.260144Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Forwarded TEvStreamData to [11:7592676745652750760:4316] trace_id# 2026-01-07T17:44:06.266387Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-07T17:44:06.266579Z node 13 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrx1yp0nr3th3qefc5sjb2", SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, Slow query, duration: 32.676319s, status: STATUS_CODE_UNSPECIFIED, user: user@system, results: 0b, text: "SELECT 42;", parameters: 0b 2026-01-07T17:44:06.266674Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, TxInfo status# Committed kind# Pure total_duration# 9.367 server_duration# 9.276 queries_count# 2 trace_id# 2026-01-07T17:44:06.266779Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-07T17:44:06.267088Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: ExecuteState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-07T17:44:06.267568Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: CleanupState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, EndCleanup is_final# true trace_id# 2026-01-07T17:44:06.267706Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: CleanupState, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Sent query response back to proxy proxy_request_id# 384 proxy_id# [13:7592676657182357171:2266] trace_id# 2026-01-07T17:44:06.267756Z node 13 
:KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: unknown state, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:44:06.267908Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=ZDYwNGY2MDgtMTgwNDI2ZjctYWJiNDIzMWEtZjU5MThjMjE=, ActorId: [13:7592676743081710911:4185], ActorState: unknown state, LegacyTraceId: 01kecrx1yp0nr3th3qefc5sjb2, Session actor destroyed trace_id# 2026-01-07T17:44:06.330914Z node 13 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [13:7592676657182357019:2135], processor id# 72075186224037891, database# /Root/test-dedicated 2026-01-07T17:44:06.341024Z node 11 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-07T17:44:06.341600Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:44:06.341776Z node 11 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2026-01-07T17:44:06.342004Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:44:06.379934Z node 11 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=11&id=NzM0MWFiMDItZTUzMzI2MGMtZTIyYzAxYjEtYzU5MDlmZGE=, ActorId: [11:7592676659753402921:2331], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:44:06.380039Z node 11 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=11&id=NzM0MWFiMDItZTUzMzI2MGMtZTIyYzAxYjEtYzU5MDlmZGE=, ActorId: [11:7592676659753402921:2331], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:44:06.380087Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=11&id=NzM0MWFiMDItZTUzMzI2MGMtZTIyYzAxYjEtYzU5MDlmZGE=, ActorId: [11:7592676659753402921:2331], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:44:06.380136Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=11&id=NzM0MWFiMDItZTUzMzI2MGMtZTIyYzAxYjEtYzU5MDlmZGE=, ActorId: [11:7592676659753402921:2331], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:44:06.380266Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=11&id=NzM0MWFiMDItZTUzMzI2MGMtZTIyYzAxYjEtYzU5MDlmZGE=, ActorId: [11:7592676659753402921:2331], ActorState: unknown state, Session actor destroyed trace_id# |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2026-01-07T17:44:09.027689Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:09.030835Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3e/r3tmp/tmpdHr3hd/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:09.031617Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3e/r3tmp/tmpdHr3hd/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b3e/r3tmp/tmpdHr3hd/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9051667370189099118 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:09.095652Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.096002Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:09.131134Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:09.131290Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:09.131351Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:09.131416Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:09.131534Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.131578Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: 
Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:09.131654Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:09.131679Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:09.131822Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.157507Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.157766Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.157840Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.158240Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.158372Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:09.158514Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.158547Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.158674Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.158861Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:09.158899Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:09.158980Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807849 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:09.159473Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:09.159565Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.161494Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.162124Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.162272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.162424Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:09.162465Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:09.162742Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.162913Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:09.162987Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:10.329440Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.329564Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.329703Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:10.329756Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.329804Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:10.329841Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.329895Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:10.330126Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:10.330198Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:10.330253Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.330326Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:10.330400Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.330470Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:472:2352])) 2026-01-07T17:44:10.330564Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:10.330728Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:10.330867Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807850 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-07T17:44:10.331221Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:10.633238Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.633496Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [2:474:2102]) priority=0 resources={0, 100} 2026-01-07T17:44:10.633556Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [2:474:2102]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.633613Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:474:2102]) from queue queue_kqp_resource_manager 2026-01-07T17:44:10.633668Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [2:474:2102]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.633723Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:474:2102])) 2026-01-07T17:44:10.633852Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:10.633924Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-2-2 (2 by [2:474:2102]) priority=0 resources={0, 100} 2026-01-07T17:44:10.633963Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-2-2 (2 by [2:474:2102]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.633998Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:474:2102]) from queue queue_kqp_resource_manager 2026-01-07T17:44:10.634036Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-2-2 (2 by [2:474:2102]) to queue queue_kqp_resource_manager 2026-01-07T17:44:10.634071Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:474:2102])) 2026-01-07T17:44:10.634151Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:10.634221Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:10.634370Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807851 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-07T17:44:10.634709Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:10.920316Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:10.920445Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:10.920489Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:10.920533Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-07T17:44:10.920575Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-07T17:44:10.920613Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:10.920664Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:472:2352])) 2026-01-07T17:44:10.920702Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-07T17:44:10.920760Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:10.920881Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807852 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:10.921203Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:11.226193Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:11.226354Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [2:474:2102]) (release resources {0, 100}) 2026-01-07T17:44:11.226410Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:474:2102])) 2026-01-07T17:44:11.226450Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-07T17:44:11.226498Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-07T17:44:11.226540Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-2-2 (2 by [2:474:2102]) (release resources {0, 100}) 2026-01-07T17:44:11.226575Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:474:2102])) 2026-01-07T17:44:11.226607Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-07T17:44:11.226663Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:11.226840Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807853 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:11.227151Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:11.555475Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions >> TableCreation::UpdateTableAcl [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl >> TableCreation::CreateOldTable [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> TestSqsTopicHttpProxy::TestSendMessageTooBig >> TableCreation::RollbackTableAcl [GOOD] |90.7%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::UpdateTableAcl [GOOD] Test command err: 2026-01-07T17:43:45.329911Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676794342649900:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.330130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:45.767828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:45.767931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:45.782959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:45.997093Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676794342649678:2081] 1767807825283681 != 1767807825283684 2026-01-07T17:43:46.002859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:46.331008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:46.409725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.409754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.409765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.409852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.518540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.533914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:49.482547Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.547532Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:49.547588Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:49.547629Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.565959Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.565984Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. 
Creating table 2026-01-07T17:43:49.566048Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.566301Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.566313Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Creating table 2026-01-07T17:43:49.566331Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.569269Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.569282Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Creating table 2026-01-07T17:43:49.569308Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:49.583300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.586641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.588625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.614284Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:49.614369Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:49.616261Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:49.616281Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:49.617007Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:49.617032Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:49.619598Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:49.622106Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:49.622114Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:49.827130Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-07T17:43:49.892843Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:49.893755Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676811522519707:2494] Owner: [1:7592676811522519704:2492]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:49.907136Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:49.907183Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Column diff is empty, finishing 2026-01-07T17:43:49.908140Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.909172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.910769Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:49.910783Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7592676811522519708:2495] Owner: [1:7592676811522519704:2492]. Successful alter request: ExecComplete 2026-01-07T17:43:49.945299Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-07T17:43:49.945406Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Column diff is empty, finishing 2026-01-07T17:43:49.945506Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676811522519706:2493] Owner: [1:7592676811522519704:2492]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.946675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.948398Z node 1 :K ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:11.730165Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YzIzYTM4MjItZDU4ZGEzYzItODc4MDU2ZGUtZTE4ZDYwZWM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7592676907437297387:2515] 2026-01-07T17:44:11.730205Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7592676907437297389:2945] 2026-01-07T17:44:11.735530Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7592676907437297388:2516], selfId: [3:7592676881667492130:2260], source: [3:7592676907437297387:2515] 2026-01-07T17:44:11.735848Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676907437297384:2943], ActorId: [3:7592676907437297385:2944], TraceId: ExecutionId: abcd52b8-cf22c01e-264bab73-60913344, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YzIzYTM4MjItZDU4ZGEzYzItODc4MDU2ZGUtZTE4ZDYwZWM=, TxId: 2026-01-07T17:44:11.736430Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676907437297384:2943], ActorId: [3:7592676907437297385:2944], TraceId: ExecutionId: abcd52b8-cf22c01e-264bab73-60913344, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YzIzYTM4MjItZDU4ZGEzYzItODc4MDU2ZGUtZTE4ZDYwZWM=, TxId: 2026-01-07T17:44:11.736465Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676907437297384:2943], ActorId: [3:7592676907437297385:2944], TraceId: ExecutionId: abcd52b8-cf22c01e-264bab73-60913344, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-07T17:44:11.736545Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676907437297383:2942], ActorId: [3:7592676907437297384:2943], TraceId: ExecutionId: abcd52b8-cf22c01e-264bab73-60913344, RequestDatabase: /dc-1, Got response [3:7592676907437297385:2944] SUCCESS 2026-01-07T17:44:11.736609Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676907437297382:2941] ActorId: [3:7592676907437297383:2942] Database: /dc-1 ExecutionId: abcd52b8-cf22c01e-264bab73-60913344. Extracted script execution operation [3:7592676907437297385:2944], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7592676898847362164:2666], LeaseGeneration: 0 2026-01-07T17:44:11.736637Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676907437297382:2941] ActorId: [3:7592676907437297383:2942] Database: /dc-1 ExecutionId: abcd52b8-cf22c01e-264bab73-60913344. 
Reply success 2026-01-07T17:44:11.736741Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YzIzYTM4MjItZDU4ZGEzYzItODc4MDU2ZGUtZTE4ZDYwZWM=, workerId: [3:7592676907437297387:2515], local sessions count: 0 2026-01-07T17:44:11.759801Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry77fdtwr51t2nm019vgq", Request has 18444976265857.791844s seconds to be completed 2026-01-07T17:44:11.762149Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry77fdtwr51t2nm019vgq", Created new session, sessionId: ydb://session/3?node_id=3&id=MTZjNmUxMjAtM2Q3ZTc2Yy1mNTE5ZTIzYy1iZjUwNjk5OQ==, workerId: [3:7592676907437297417:2529], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T17:44:11.762365Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry77fdtwr51t2nm019vgq 2026-01-07T17:44:11.770059Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecry77sa0pxnf1em6rs2mfk, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MTZjNmUxMjAtM2Q3ZTc2Yy1mNTE5ZTIzYy1iZjUwNjk5OQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7592676907437297417:2529] 2026-01-07T17:44:11.770099Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7592676907437297421:2953] 2026-01-07T17:44:11.781221Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:11.785431Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecry77sa0pxnf1em6rs2mfk", Forwarded response to sender actor, requestId: 42, sender: [3:7592676907437297419:2530], selfId: [3:7592676881667492130:2260], source: [3:7592676907437297417:2529] --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:11.793292Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Describe result: PathErrorUnknown 2026-01-07T17:44:11.793317Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Creating table 2026-01-07T17:44:11.793355Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-07T17:44:11.796486Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:11.797518Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-07T17:44:11.797544Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Subscribe on create table tx: 281474976710687 2026-01-07T17:44:11.799872Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Subscribe on tx: 281474976710687 registered 2026-01-07T17:44:11.823640Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-01-07T17:44:11.912294Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:11.912337Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676907437297431:2960] Owner: [3:7592676907437297430:2959]. Column diff is empty, finishing 2026-01-07T17:44:11.924541Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry7cmej3dzjtvqqqpvmje", Request has 18444976265857.627110s seconds to be completed 2026-01-07T17:44:11.927014Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry7cmej3dzjtvqqqpvmje", Created new session, sessionId: ydb://session/3?node_id=3&id=OTI2ZDI1NjAtYzI4YTA3MmItYWYyNDBhNy0xY2JlZWI3Yg==, workerId: [3:7592676907437297560:2539], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:11.927231Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry7cmej3dzjtvqqqpvmje 2026-01-07T17:44:11.946510Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676907437297566:3064] Owner: [3:7592676907437297565:3063]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:11.946553Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676907437297566:3064] Owner: [3:7592676907437297565:3063]. Column diff is empty, finishing 2026-01-07T17:44:11.946638Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7592676907437297566:3064] Owner: [3:7592676907437297565:3063]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-07T17:44:11.947406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:11.948747Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676907437297566:3064] Owner: [3:7592676907437297565:3063]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:44:11.948784Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7592676907437297566:3064] Owner: [3:7592676907437297565:3063]. 
Successful alter request: ExecComplete 2026-01-07T17:44:11.951527Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OTI2ZDI1NjAtYzI4YTA3MmItYWYyNDBhNy0xY2JlZWI3Yg==, workerId: [3:7592676907437297560:2539], local sessions count: 1 2026-01-07T17:44:11.953786Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry7dhf0978feqftjqm4c7", Request has 18444976265857.597856s seconds to be completed 2026-01-07T17:44:11.956208Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry7dhf0978feqftjqm4c7", Created new session, sessionId: ydb://session/3?node_id=3&id=ZWE1NzgyNTktZTM2OTk1MjktMWQ2MzBkNzUtNTZhYjBmMTM=, workerId: [3:7592676907437297579:2543], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:11.956413Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry7dhf0978feqftjqm4c7 2026-01-07T17:44:11.979129Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZWE1NzgyNTktZTM2OTk1MjktMWQ2MzBkNzUtNTZhYjBmMTM=, workerId: [3:7592676907437297579:2543], local sessions count: 1 2026-01-07T17:44:11.981453Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MTZjNmUxMjAtM2Q3ZTc2Yy1mNTE5ZTIzYy1iZjUwNjk5OQ==, workerId: [3:7592676907437297417:2529], local sessions count: 0 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] Test command err: 2026-01-07T17:43:45.673356Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676794607550723:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.673548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.009644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:46.009748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:46.016523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:46.116875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:46.545626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.545649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.545663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.545758Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.632910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.641058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:46.685628Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:49.498695Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.565770Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:49.565810Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:49.565831Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.570867Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.570891Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. Creating table 2026-01-07T17:43:49.570927Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:49.570982Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.570985Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Creating table 2026-01-07T17:43:49.570997Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.571867Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.571874Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Creating table 2026-01-07T17:43:49.571894Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.596122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.611364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.620627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.631350Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:49.631455Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:49.633089Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:49.633120Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:49.633197Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:49.633231Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:49.635043Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:49.635844Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:49.635856Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. 
Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:49.826499Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:49.903340Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-07T17:43:49.919589Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676811787420701:2493] Owner: [1:7592676811787420699:2491]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:49.922357Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-07T17:43:49.922457Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Column diff is empty, finishing 2026-01-07T17:43:49.923381Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.924367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.925335Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:49.925349Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7592676811787420700:2492] Owner: [1:7592676811787420699:2491]. Successful alter request: ExecComplete 2026-01-07T17:43:49.957752Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:49.957807Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. Column diff is empty, finishing 2026-01-07T17:43:49.957875Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.958729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.960895Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676811787420702:2494] Owner: [1:7592676811787420699:2491]. TEvProposeTransactionStatus: { Status: 48 TxId: 28 ... ed 2026-01-07T17:44:12.149694Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569381:2955] Owner: [3:7592676910130569380:2954]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.149704Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569375:2949] Owner: [3:7592676910130569374:2948]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.149782Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710694 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710691 } 2026-01-07T17:44:12.149799Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. Subscribe on create table tx: 281474976710691 2026-01-07T17:44:12.150017Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710695 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710691 } 2026-01-07T17:44:12.150035Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. Subscribe on create table tx: 281474976710691 2026-01-07T17:44:12.150360Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569387:2961] Owner: [3:7592676910130569386:2960]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.150468Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569389:2963] Owner: [3:7592676910130569388:2962]. 
Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.151163Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569383:2957] Owner: [3:7592676910130569382:2956]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.151173Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569393:2967] Owner: [3:7592676910130569392:2966]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.151530Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.152222Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. Subscribe on tx: 281474976710691 registered 2026-01-07T17:44:12.180527Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569379:2953] Owner: [3:7592676910130569378:2952]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180583Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180627Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180647Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569381:2955] Owner: [3:7592676910130569380:2954]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180649Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569375:2949] Owner: [3:7592676910130569374:2948]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180674Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569387:2961] Owner: [3:7592676910130569386:2960]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180688Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569393:2967] Owner: [3:7592676910130569392:2966]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180702Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569383:2957] Owner: [3:7592676910130569382:2956]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180729Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569377:2951] Owner: [3:7592676910130569376:2950]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.180746Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676910130569389:2963] Owner: [3:7592676910130569388:2962]. Request: create. Transaction completed: 281474976710691. Doublechecking... 2026-01-07T17:44:12.232121Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. 
SelfId: [3:7592676910130569393:2967] Owner: [3:7592676910130569392:2966]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.232171Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569393:2967] Owner: [3:7592676910130569392:2966]. Column diff is empty, finishing 2026-01-07T17:44:12.234131Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569377:2951] Owner: [3:7592676910130569376:2950]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.234152Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569377:2951] Owner: [3:7592676910130569376:2950]. Column diff is empty, finishing 2026-01-07T17:44:12.235600Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569387:2961] Owner: [3:7592676910130569386:2960]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.235619Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569387:2961] Owner: [3:7592676910130569386:2960]. Column diff is empty, finishing 2026-01-07T17:44:12.244485Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.244524Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569391:2965] Owner: [3:7592676910130569390:2964]. Column diff is empty, finishing 2026-01-07T17:44:12.246451Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569375:2949] Owner: [3:7592676910130569374:2948]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.246496Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569375:2949] Owner: [3:7592676910130569374:2948]. Column diff is empty, finishing 2026-01-07T17:44:12.250338Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569379:2953] Owner: [3:7592676910130569378:2952]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.250372Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569379:2953] Owner: [3:7592676910130569378:2952]. Column diff is empty, finishing 2026-01-07T17:44:12.253159Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.253190Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569385:2959] Owner: [3:7592676910130569384:2958]. Column diff is empty, finishing 2026-01-07T17:44:12.257700Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569389:2963] Owner: [3:7592676910130569388:2962]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.257730Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569389:2963] Owner: [3:7592676910130569388:2962]. 
Column diff is empty, finishing 2026-01-07T17:44:12.261778Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569383:2957] Owner: [3:7592676910130569382:2956]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.261803Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569383:2957] Owner: [3:7592676910130569382:2956]. Column diff is empty, finishing 2026-01-07T17:44:12.276889Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676910130569381:2955] Owner: [3:7592676910130569380:2954]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:12.276948Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676910130569381:2955] Owner: [3:7592676910130569380:2954]. Column diff is empty, finishing 2026-01-07T17:44:12.312917Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry7rr3h51f3c4pf4azv3b", Request has 18444976265857.238732s seconds to be completed 2026-01-07T17:44:12.315538Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry7rr3h51f3c4pf4azv3b", Created new session, sessionId: ydb://session/3?node_id=3&id=YmMwNzUyMWItZmE1MmRiYjItOTZiNGNjZTQtZGZkZDliMmU=, workerId: [3:7592676910130569612:2530], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:12.315758Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry7rr3h51f3c4pf4azv3b 2026-01-07T17:44:12.343515Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YmMwNzUyMWItZmE1MmRiYjItOTZiNGNjZTQtZGZkZDliMmU=, workerId: [3:7592676910130569612:2530], local sessions count: 1 2026-01-07T17:44:12.348284Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MzE1MWU1YzYtNGUzYjM3MDktOTAyYzZiZjMtN2IzMjkwZWQ=, workerId: [3:7592676910130569360:2520], local sessions count: 0 >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2026-01-07T17:43:45.477045Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676796918651735:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.477202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:45.682341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:45.682454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:45.889106Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676796918651518:2081] 1767807825424540 != 1767807825424543 2026-01-07T17:43:45.907726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:45.949438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-07T17:43:46.250386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.250413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.250424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.250509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.299983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.473870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:49.204116Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.209840Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:49.209877Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:49.209918Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.221631Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.221656Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Creating table 2026-01-07T17:43:49.221710Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.221856Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.221864Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Creating table 2026-01-07T17:43:49.221895Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:49.222013Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.222018Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. Creating table 2026-01-07T17:43:49.222046Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.227512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.229371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.231923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.242693Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:49.242761Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Subscribe on create table tx: 281474976715658 2026-01-07T17:43:49.242921Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:49.242936Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. Subscribe on create table tx: 281474976715660 2026-01-07T17:43:49.245080Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:49.245122Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Subscribe on create table tx: 281474976715659 2026-01-07T17:43:49.247245Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Subscribe on tx: 281474976715658 registered 2026-01-07T17:43:49.247257Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. Subscribe on tx: 281474976715660 registered 2026-01-07T17:43:49.247266Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. 
Subscribe on tx: 281474976715659 registered 2026-01-07T17:43:49.373014Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2026-01-07T17:43:49.424698Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Request: create. Transaction completed: 281474976715659. Doublechecking... 2026-01-07T17:43:49.436445Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676814098521535:2490] Owner: [1:7592676814098521532:2489]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2026-01-07T17:43:49.445584Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:49.445629Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Column diff is empty, finishing 2026-01-07T17:43:49.446796Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.447941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.457526Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:49.457594Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7592676814098521537:2492] Owner: [1:7592676814098521532:2489]. Successful alter request: ExecComplete 2026-01-07T17:43:49.522241Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-07T17:43:49.522282Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. Column diff is empty, finishing 2026-01-07T17:43:49.522389Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:49.523411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.525113Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676814098521536:2491] Owner: [1:7592676814098521532:2489]. TEvProposeTra ... ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 1, run create session 2026-01-07T17:44:13.709788Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976265855.841846s seconds to be completed 2026-01-07T17:44:13.712355Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, workerId: [3:7592676914923608909:2505], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T17:44:13.712558Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T17:44:13.712952Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676914923608906:2938], ActorId: [3:7592676914923608907:2939], TraceId: ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph, retry_state, user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:13.717265Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 38, targetId: [3:7592676914923608909:2505] 2026-01-07T17:44:13.717326Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 38 timeout: 300.000000s actor id: [3:7592676914923608911:2940] 2026-01-07T17:44:13.741827Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 38, sender: [3:7592676914923608910:2506], selfId: [3:7592676880563869013:2200], source: [3:7592676914923608909:2505] 2026-01-07T17:44:13.742215Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676914923608906:2938], ActorId: [3:7592676914923608907:2939], TraceId: ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, TxId: 2026-01-07T17:44:13.742895Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676914923608906:2938], ActorId: [3:7592676914923608907:2939], TraceId: ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, TxId: 2026-01-07T17:44:13.742929Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676914923608906:2938], ActorId: [3:7592676914923608907:2939], TraceId: ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-07T17:44:13.743010Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676914923608905:2937], ActorId: [3:7592676914923608906:2938], TraceId: ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875, RequestDatabase: /dc-1, Got response [3:7592676914923608907:2939] SUCCESS 2026-01-07T17:44:13.743060Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676914923608904:2936] ActorId: [3:7592676914923608905:2937] Database: /dc-1 ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875. Extracted script execution operation [3:7592676914923608907:2939], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7592676897743739127:2676], LeaseGeneration: 0 2026-01-07T17:44:13.743084Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676914923608904:2936] ActorId: [3:7592676914923608905:2937] Database: /dc-1 ExecutionId: 5fe7b5d3-63bb4a7e-8ce91c62-3dc73875. 
Reply success 2026-01-07T17:44:13.743526Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OTg4ZjViY2QtNGYwZGY4ZTctYjdhYmZmMmItN2FiY2U4YWI=, workerId: [3:7592676914923608909:2505], local sessions count: 0 2026-01-07T17:44:13.775108Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry96e6yny5y6xhptdy75j", Request has 18444976265855.776541s seconds to be completed 2026-01-07T17:44:13.777638Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry96e6yny5y6xhptdy75j", Created new session, sessionId: ydb://session/3?node_id=3&id=MWVjMTdmNWEtNWYyODA1NC1kNjEyMTBiYi0zODhmYzlmNA==, workerId: [3:7592676914923608940:2519], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T17:44:13.777885Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry96e6yny5y6xhptdy75j 2026-01-07T17:44:13.794765Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecry9727ysd48fgaq04zezb, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MWVjMTdmNWEtNWYyODA1NC1kNjEyMTBiYi0zODhmYzlmNA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7592676914923608940:2519] 2026-01-07T17:44:13.794807Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 600.000000s actor id: [3:7592676914923608943:2949] 2026-01-07T17:44:13.806292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:13.812898Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecry9727ysd48fgaq04zezb", Forwarded response to sender actor, requestId: 40, sender: [3:7592676914923608942:2520], selfId: [3:7592676880563869013:2200], source: [3:7592676914923608940:2519] --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:13.821837Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Describe result: PathErrorUnknown 2026-01-07T17:44:13.821861Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Creating table 2026-01-07T17:44:13.821901Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-07T17:44:13.827454Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:13.828592Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715686 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-07T17:44:13.828629Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Subscribe on create table tx: 281474976715686 2026-01-07T17:44:13.831183Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Subscribe on tx: 281474976715686 registered 2026-01-07T17:44:13.860977Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Request: create. Transaction completed: 281474976715686. Doublechecking... 2026-01-07T17:44:13.930493Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:13.930569Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676914923608970:2972] Owner: [3:7592676914923608969:2971]. Column diff is empty, finishing 2026-01-07T17:44:13.931206Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676914923609080:3056] Owner: [3:7592676914923609079:3055]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:44:13.931245Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676914923609080:3056] Owner: [3:7592676914923609079:3055]. Column diff is empty, finishing 2026-01-07T17:44:13.957690Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry9c58q531vjdftdvzagr", Request has 18444976265855.593963s seconds to be completed 2026-01-07T17:44:13.960827Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry9c58q531vjdftdvzagr", Created new session, sessionId: ydb://session/3?node_id=3&id=YTNjMzc3NTItYzBjMzFiOTUtNjQzNTBjZmItZmExMTFmZGI=, workerId: [3:7592676914923609086:2529], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:13.961292Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry9c58q531vjdftdvzagr 2026-01-07T17:44:13.996522Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YTNjMzc3NTItYzBjMzFiOTUtNjQzNTBjZmItZmExMTFmZGI=, workerId: [3:7592676914923609086:2529], local sessions count: 1 2026-01-07T17:44:14.016219Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MWVjMTdmNWEtNWYyODA1NC1kNjEyMTBiYi0zODhmYzlmNA==, workerId: [3:7592676914923608940:2519], local sessions count: 0 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore >> TCacheTest::MigrationDeletedPathNavigate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::RollbackTableAcl [GOOD] Test command err: 2026-01-07T17:43:45.520664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676795347130713:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.520833Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:45.990632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:45.990745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:46.066654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676795347130588:2081] 1767807825510269 != 1767807825510272 2026-01-07T17:43:46.085420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:46.149949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:46.516442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.516467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.516476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.516554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.560850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:46.628641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.636297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:50.525144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676795347130713:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:50.528889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:43:50.559835Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:50.591090Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:50.591140Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:50.591162Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:50.594481Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Describe result: PathErrorUnknown 2026-01-07T17:43:50.594522Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. 
Creating table 2026-01-07T17:43:50.594588Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:50.594993Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Describe result: PathErrorUnknown 2026-01-07T17:43:50.595014Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Creating table 2026-01-07T17:43:50.595048Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:50.595587Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Describe result: PathErrorUnknown 2026-01-07T17:43:50.595600Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Creating table 2026-01-07T17:43:50.595626Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:50.639252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:50.646316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:50.648349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:50.674176Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:50.674252Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:50.674425Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:50.674448Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:50.677049Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:50.677087Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:50.679440Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:50.679462Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:50.680438Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:50.909386Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:50.975819Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:50.980167Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-07T17:43:50.980246Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Column diff is empty, finishing 2026-01-07T17:43:50.981309Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:50.985220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:50.993311Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. 
TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:50.993336Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7592676816821967915:2495] Owner: [1:7592676816821967913:2494]. Successful alter request: ExecComplete 2026-01-07T17:43:51.002145Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676816821967918:2498] Owner: [1:7592676816821967913:2494]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-07T17:43:51.057263Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-07T17:43:51.057297Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Column diff is empty, finishing 2026-01-07T17:43:51.057374Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676816821967917:2497] Owner: [1:7592676816821967913:2494]. Created ESch ... D (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:13.523685Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjIxNTNjODAtOTQzYzE1OTgtNGRmYjFlNWEtY2NhYzMyMDk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7592676915458943445:2515] 2026-01-07T17:44:13.523741Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7592676915458943447:2950] 2026-01-07T17:44:13.545660Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7592676915458943446:2516], selfId: [3:7592676885394170873:2243], source: [3:7592676915458943445:2515] 2026-01-07T17:44:13.546623Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676915458943442:2948], ActorId: [3:7592676915458943443:2949], TraceId: ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZjIxNTNjODAtOTQzYzE1OTgtNGRmYjFlNWEtY2NhYzMyMDk=, TxId: 2026-01-07T17:44:13.547271Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676915458943442:2948], ActorId: [3:7592676915458943443:2949], TraceId: ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZjIxNTNjODAtOTQzYzE1OTgtNGRmYjFlNWEtY2NhYzMyMDk=, TxId: 2026-01-07T17:44:13.547300Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676915458943442:2948], ActorId: [3:7592676915458943443:2949], TraceId: ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-07T17:44:13.547444Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676915458943441:2947], ActorId: [3:7592676915458943442:2948], TraceId: ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc, RequestDatabase: /dc-1, Got response [3:7592676915458943443:2949] SUCCESS 2026-01-07T17:44:13.547510Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676915458943440:2946] ActorId: [3:7592676915458943441:2947] Database: /dc-1 ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc. Extracted script execution operation [3:7592676915458943443:2949], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7592676902574040925:2669], LeaseGeneration: 0 2026-01-07T17:44:13.547537Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676915458943440:2946] ActorId: [3:7592676915458943441:2947] Database: /dc-1 ExecutionId: 5dadb6e1-527c0690-5f3a7fc2-ead2f5bc. 
Reply success 2026-01-07T17:44:13.548247Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjIxNTNjODAtOTQzYzE1OTgtNGRmYjFlNWEtY2NhYzMyMDk=, workerId: [3:7592676915458943445:2515], local sessions count: 0 2026-01-07T17:44:13.593700Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry90r5svpq2v1a1sdnkcy", Request has 18444976265855.957949s seconds to be completed 2026-01-07T17:44:13.596485Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry90r5svpq2v1a1sdnkcy", Created new session, sessionId: ydb://session/3?node_id=3&id=YzJjY2I2OTctZGQ5MmMxYzEtMzQyNTBmYTktZTQyZWFlY2U=, workerId: [3:7592676915458943476:2529], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T17:44:13.596728Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry90r5svpq2v1a1sdnkcy 2026-01-07T17:44:13.633540Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecry920cyze11pmexv4tksc, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YzJjY2I2OTctZGQ5MmMxYzEtMzQyNTBmYTktZTQyZWFlY2U=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7592676915458943476:2529] 2026-01-07T17:44:13.633598Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7592676915458943479:2959] 2026-01-07T17:44:13.662123Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:13.685080Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecry920cyze11pmexv4tksc", Forwarded response to sender actor, requestId: 42, sender: [3:7592676915458943478:2530], selfId: [3:7592676885394170873:2243], source: [3:7592676915458943476:2529] --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:13.700905Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Describe result: PathErrorUnknown 2026-01-07T17:44:13.700951Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Creating table 2026-01-07T17:44:13.701005Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-07T17:44:13.717646Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:13.721415Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-07T17:44:13.721466Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Subscribe on create table tx: 281474976710687 2026-01-07T17:44:13.734233Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Subscribe on tx: 281474976710687 registered 2026-01-07T17:44:13.843127Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-01-07T17:44:13.909362Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:13.909408Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676915458943490:2966] Owner: [3:7592676915458943489:2965]. Column diff is empty, finishing 2026-01-07T17:44:13.940001Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry9bkakjs4056p144h5ek", Request has 18444976265855.611644s seconds to be completed 2026-01-07T17:44:13.942471Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry9bkakjs4056p144h5ek", Created new session, sessionId: ydb://session/3?node_id=3&id=YzRiMDBiMzgtYTQ2OGEwMjUtM2I5YjhmZmQtNTUxMWMxODI=, workerId: [3:7592676915458943622:2539], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:13.942679Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry9bkakjs4056p144h5ek 2026-01-07T17:44:13.961789Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7592676915458943633:3073] Owner: [3:7592676915458943632:3072]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-07T17:44:13.961822Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7592676915458943633:3073] Owner: [3:7592676915458943632:3072]. Column diff is empty, finishing 2026-01-07T17:44:13.961906Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7592676915458943633:3073] Owner: [3:7592676915458943632:3072]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-07T17:44:13.962887Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:13.964758Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7592676915458943633:3073] Owner: [3:7592676915458943632:3072]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:44:13.964774Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7592676915458943633:3073] Owner: [3:7592676915458943632:3072]. 
Successful alter request: ExecComplete 2026-01-07T17:44:13.967152Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YzRiMDBiMzgtYTQ2OGEwMjUtM2I5YjhmZmQtNTUxMWMxODI=, workerId: [3:7592676915458943622:2539], local sessions count: 1 2026-01-07T17:44:13.978126Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry9cs27mfjghh9ebhns14", Request has 18444976265855.573517s seconds to be completed 2026-01-07T17:44:13.980591Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry9cs27mfjghh9ebhns14", Created new session, sessionId: ydb://session/3?node_id=3&id=OGJjMDhmZWEtYTU0ZDBjZTEtYjZiZDA4LTZjNDIzZjll, workerId: [3:7592676915458943647:2546], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:13.980811Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry9cs27mfjghh9ebhns14 2026-01-07T17:44:14.005433Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OGJjMDhmZWEtYTU0ZDBjZTEtYjZiZDA4LTZjNDIzZjll, workerId: [3:7592676915458943647:2546], local sessions count: 1 2026-01-07T17:44:14.010887Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YzJjY2I2OTctZGQ5MmMxYzEtMzQyNTBmYTktZTQyZWFlY2U=, workerId: [3:7592676915458943476:2529], local sessions count: 0 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2026-01-07T17:43:46.159603Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676802468007322:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:46.159682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.573131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.650588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:46.650682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:46.829166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676802468007299:2081] 1767807826158621 != 1767807826158624 2026-01-07T17:43:46.861473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:46.880532Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:46.916891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.175514Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.255081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:47.260046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:50.324770Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:50.338389Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-07T17:43:50.342031Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-07T17:43:50.342166Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:50.342192Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:50.342221Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:50.342455Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7592676806762975391:2481], selfId: [1:7592676802468007561:2266], source: [1:7592676802468007561:2266] 2026-01-07T17:43:50.347106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676819647877315:2304], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:50.347396Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-07T17:43:50.347432Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-07T17:43:50.347805Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7592676806762975391:2481], selfId: [1:7592676802468007561:2266], source: [1:7592676802468007561:2266] 2026-01-07T17:43:50.347872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:50.349543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676819647877339:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:50.349755Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2026-01-07T17:43:50.349790Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq } 2026-01-07T17:43:50.349868Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 4, sender: [1:7592676806762975391:2481], selfId: [1:7592676802468007561:2266], source: [1:7592676802468007561:2266] 2026-01-07T17:43:50.349942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:50.352819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676819647877341:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:50.352910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2026-01-07T17:43:54.522950Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:54.529354Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:54.533823Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:54.534187Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:54.534375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:54.798473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:54.798578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:54.819058Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767807831313985 != 1767807831313989 2026-01-07T17:43:54.822521Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:54.872440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:54.941209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:55.230734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:55.244009Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:55.245118Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:279:2326], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:43:55.245413Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:279:2326], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:43:55.246878Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:279:2326], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:43:55.246994Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# 
[2:279:2326], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:840:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:43:55.247119Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:279:2326], cacheItem# { Subscriber: { Subscriber: [2:840:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivat ... 316:3323], IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [5:7592676920403394382:2365] 2026-01-07T17:44:14.372205Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [5:7592676920403394386:3368] 2026-01-07T17:44:14.372244Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=5&id=OTdjOWQyNTEtMTcyMTY5ZS05YmJjYWM3NC03NjM4YTcwNw==, workerId: [5:7592676920403394323:2354], local sessions count: 1 2026-01-07T17:44:14.372262Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976265855.179360s seconds to be completed 2026-01-07T17:44:14.373751Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676920403394387:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:14.373829Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:14.375492Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=YmNlODE5NDgtMTkyNTY4ZWYtMzU2NmRmZWEtZDgzYWY4NWE=, workerId: [5:7592676920403394397:2370], database: /Root, longSession: 1, local sessions count: 2 2026-01-07T17:44:14.375608Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676920403394393:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:14.375705Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:14.375740Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T17:44:14.376283Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7592676920403394316:3323], ActorId: [5:7592676920403394384:3367], TraceId: ExecutionId: d1335145-ffac5d5-ae6f2c5b-c25cdbe4, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=YmNlODE5NDgtMTkyNTY4ZWYtMzU2NmRmZWEtZDgzYWY4NWE=, TxId: , text: -- TScriptProgressActor::OnRunQuery DECLARE $execution_id AS Text; DECLARE $database AS Text; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $execution_status AS Int32; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, execution_status = $execution_status WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-01-07T17:44:14.376499Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592676920403394399:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:14.377101Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=YmNlODE5NDgtMTkyNTY4ZWYtMzU2NmRmZWEtZDgzYWY4NWE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [5:7592676920403394397:2370] 2026-01-07T17:44:14.377136Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [5:7592676920403394404:3378] 2026-01-07T17:44:14.380665Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715666:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:14.410260Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecry9t917wc905rp8ym0bes", Request has 18444976265855.141379s seconds to be completed 2026-01-07T17:44:14.412292Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecry9t917wc905rp8ym0bes", Created new session, sessionId: ydb://session/3?node_id=5&id=YTMwZGYzYTQtMzJkYTM2MDktOTM5MWJiNTMtNjQzNjhhZmU=, workerId: [5:7592676920403394466:2378], database: /Root, longSession: 1, local sessions count: 3 2026-01-07T17:44:14.412433Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecry9t917wc905rp8ym0bes 2026-01-07T17:44:14.412543Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592676920403394402:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715666 completed, doublechecking } 2026-01-07T17:44:14.423837Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecry9tqb4204v7tscc10z5s, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YTMwZGYzYTQtMzJkYTM2MDktOTM5MWJiNTMtNjQzNjhhZmU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 11, targetId: [5:7592676920403394466:2378] 2026-01-07T17:44:14.423885Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [5:7592676920403394470:3424] 2026-01-07T17:44:14.440626Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7592676920403394487:3429], for# user@builtin, access# DescribeSchema 2026-01-07T17:44:14.440677Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7592676920403394487:3429], for# user@builtin, access# DescribeSchema 2026-01-07T17:44:14.442871Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592676920403394477:2384], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:14.445483Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YTMwZGYzYTQtMzJkYTM2MDktOTM5MWJiNTMtNjQzNjhhZmU=, ActorId: [5:7592676920403394466:2378], ActorState: ExecuteState, LegacyTraceId: 01kecry9tqb4204v7tscc10z5s, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:14.445908Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecry9tqb4204v7tscc10z5s", Forwarded response to sender actor, requestId: 11, sender: [5:7592676920403394469:2379], selfId: [5:7592676903223523494:2269], source: [5:7592676920403394466:2378] 2026-01-07T17:44:14.507685Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592676920403394493:3433] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:14.592539Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecry97b1k3hj50emxb85n3d", Forwarded response to sender actor, requestId: 7, sender: [5:7592676920403394316:3323], selfId: [5:7592676903223523494:2269], source: [5:7592676920403394382:2365] 2026-01-07T17:44:14.593222Z node 5 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7592676920403394316:3323], ActorId: [5:7592676920403394524:3446], TraceId: ExecutionId: d1335145-ffac5d5-ae6f2c5b-c25cdbe4, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7592676920403394525:3447] 2026-01-07T17:44:14.593258Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7592676920403394524:3446], ActorId: [5:7592676920403394525:3447], TraceId: ExecutionId: d1335145-ffac5d5-ae6f2c5b-c25cdbe4, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. 
Database: /Root, IsSystemUser: 1, run create session 2026-01-07T17:44:14.593414Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976265854.958220s seconds to be completed 2026-01-07T17:44:14.595812Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=ZWYzZjA5NzMtZjVjMWRjOTMtM2M3ZjIwMWEtMTc2MWJjOTQ=, workerId: [5:7592676920403394530:2391], database: /Root, longSession: 1, local sessions count: 4 2026-01-07T17:44:14.596046Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T17:44:14.596443Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7592676920403394524:3446], ActorId: [5:7592676920403394525:3447], TraceId: ExecutionId: d1335145-ffac5d5-ae6f2c5b-c25cdbe4, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=ZWYzZjA5NzMtZjVjMWRjOTMtM2M3ZjIwMWEtMTc2MWJjOTQ=, TxId: , text: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-01-07T17:44:14.596830Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZWYzZjA5NzMtZjVjMWRjOTMtM2M3ZjIwMWEtMTc2MWJjOTQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 13, targetId: [5:7592676920403394530:2391] 2026-01-07T17:44:14.596868Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [5:7592676920403394532:3448] |90.7%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2026-01-07T17:43:44.696969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.697040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... .985997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-07T17:43:46.986263Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.986383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-07T17:43:46.986507Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.986615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-07T17:43:46.986688Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.986845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-07T17:43:46.986891Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-07T17:43:46.987167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-07T17:43:46.987218Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view 
'/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987256Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-07T17:43:46.987520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-07T17:43:46.987563Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987691Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-07T17:43:46.987831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-07T17:43:46.987879Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987914Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.987954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-07T17:43:46.988002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-07T17:43:46.988064Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988113Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-07T17:43:46.988260Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 
72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-07T17:43:46.988336Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-07T17:43:46.988471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-07T17:43:46.988517Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-07T17:43:46.988623Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-07T17:43:46.988883Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-07T17:43:46.988968Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:692:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2026-01-07T17:43:46.989597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:43:46.989650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:43:46.990057Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-07T17:43:46.990205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 
2026-01-07T17:43:47.009537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-07T17:43:47.010075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-07T17:43:47.066194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:775:2669] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:775:2669] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:780:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:780:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:782:2673] sender: [2:783:2067] recipient: [2:775:2669] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2026-01-07T17:43:49.585318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:49.585409Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:49.644763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:49.644845Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TestSqsTopicHttpProxy::TestDeleteMessage >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty >> ReadOnlyVDisk::TestSync [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2026-01-07T17:43:44.987524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:44.987603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at 
step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-07T17:43:46.195860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 10032858258227097344 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2026-01-07T17:44:01.748752Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8829:948] 2026-01-07T17:44:01.749081Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8836:955] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-07T17:44:03.706790Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8843:962] 2026-01-07T17:44:03.706946Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8836:955] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-07T17:44:08.632099Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8857:976] 2026-01-07T17:44:08.632214Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8850:969] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-07T17:44:11.235422Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8864:983] 2026-01-07T17:44:11.235531Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8857:976] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 
SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-07T17:44:13.989013Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8871:990] 2026-01-07T17:44:13.989105Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8864:983] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2026-01-07T17:44:17.010623Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8871:990] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse |90.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2026-01-07T17:43:45.830222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676794275243476:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.830276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:45.919224Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.017534s 2026-01-07T17:43:46.047637Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676801918507649:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:46.049964Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.063932Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592676802332925792:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:46.064046Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.114990Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592676801504415274:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:46.115087Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.145876Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592676802533824980:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:46.145944Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:46.202939Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:46.210868Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:46.773009Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.772649Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.771808Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.776696Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.777564Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.861115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:46.982660Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.008269Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.028996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.029166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.042410Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.085223Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.141303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:47.141404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:47.142560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:47.142615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:47.144533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:43:47.144586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:47.145083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:47.145129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:47.145949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:47.145982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:47.150971Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.152345Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:43:47.152393Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:43:47.152424Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:43:47.152557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:47.153607Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.153651Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:47.165961Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.166095Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:47.167479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:47.167752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:47.167126Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:47.174849Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:47.185481Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:47.196668Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:47.199678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-07T17:43:47.210131Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:43:47.213903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:47.484743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.522786Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.577032Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.616723Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.625053Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:47.972744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311 ... 
820Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:11.654954Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:11.657667Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-07T17:44:11.659587Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:11.666543Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-07T17:44:11.697544Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:11.697647Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:11.698084Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698246Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698337Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698426Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698525Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698614Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698731Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698857Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.698965Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:44:11.701759Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:11.815509Z node 10 :STATISTICS WARN: tx_init.cpp:298: [72075186224038895] TTxInit::Complete. 
EnableColumnStatistics=false 2026-01-07T17:44:11.815669Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:12.061085Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7592676889049795773:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:12.061186Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:44:12.076301Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:12.074471Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:12.074500Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:12.074509Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:12.074596Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:12.127911Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:44:12.327513Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:12.510178Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7592676912844090371:2966], Database: /Root/test-serverless, Start database fetching 2026-01-07T17:44:12.510393Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7592676912844090371:2966], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2026-01-07T17:44:12.659880Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:15.971209Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:44:15.971299Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:44:15.971330Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7592676926708647896:2359], Start check tables existence, number paths: 2 2026-01-07T17:44:15.971976Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:44:15.974605Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7592676926708647896:2359], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:44:15.974694Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7592676926708647896:2359], Describe table 
/Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:44:15.974733Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7592676926708647896:2359], Successfully finished 2026-01-07T17:44:15.974814Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:44:15.974854Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:44:16.069047Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592676909528777754:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:16.069146Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:44:16.128948Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-07T17:44:16.130122Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-07T17:44:16.130141Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-07T17:44:16.130438Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7592676930023959713:2389], Start check tables existence, number paths: 2 2026-01-07T17:44:16.134093Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7592676930023959713:2389], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-07T17:44:16.134171Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7592676930023959713:2389], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-07T17:44:16.134204Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7592676930023959713:2389], Successfully finished 2026-01-07T17:44:16.134273Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-07T17:44:16.134577Z node 10 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-07T17:44:16.639799Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7592676908549121647:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:16.639904Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:44:17.413242Z node 9 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.111140s 2026-01-07T17:44:17.413306Z node 9 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.111244s 
2026-01-07T17:44:17.662300Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-07T17:44:17.664048Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:44:17.664222Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2026-01-07T17:44:17.664421Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:44:17.672542Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=9&id=M2NmNWQzN2EtNDZjYmJmZmItMTQ1NWQ3ODUtN2FkNTc5ODI=, ActorId: [9:7592676901934698945:2331], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:44:17.672614Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=9&id=M2NmNWQzN2EtNDZjYmJmZmItMTQ1NWQ3ODUtN2FkNTc5ODI=, ActorId: [9:7592676901934698945:2331], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-07T17:44:17.672647Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=9&id=M2NmNWQzN2EtNDZjYmJmZmItMTQ1NWQ3ODUtN2FkNTc5ODI=, ActorId: [9:7592676901934698945:2331], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-07T17:44:17.672668Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=9&id=M2NmNWQzN2EtNDZjYmJmZmItMTQ1NWQ3ODUtN2FkNTc5ODI=, ActorId: [9:7592676901934698945:2331], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-07T17:44:17.672729Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=9&id=M2NmNWQzN2EtNDZjYmJmZmItMTQ1NWQ3ODUtN2FkNTc5ODI=, ActorId: [9:7592676901934698945:2331], ActorState: unknown state, Session actor destroyed trace_id# |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageTooBig [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatchLong |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> TSyncBrokerTests::ShouldProcessAfterRelease >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 
[GOOD] >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncNeighborsTests::SerDes3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2026-01-07T17:42:37.532193Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676503938975002:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:37.533401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:37.604028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:38.125000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:38.170405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:38.170539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:38.171968Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:1581) connection closed with error: Connection refused 2026-01-07T17:42:38.173551Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:42:38.174953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:38.212720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:38.419028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:38.537150Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:41.745241Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676521516350243:2223];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:41.745326Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:41.788206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:42.006720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-07T17:42:42.022470Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:42.023607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:42.023714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:42.033311Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592676521516350049:2081] 1767807761706724 != 1767807761706727 2026-01-07T17:42:42.063173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:42.063576Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:24651) connection closed with error: Connection refused 2026-01-07T17:42:42.075495Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:42:42.246931Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:42.701936Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:46.169547Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:46.169697Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:46.397052Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:46.418977Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:46.437067Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592676541225587883:2081] 1767807766102192 != 1767807766102195 2026-01-07T17:42:46.439602Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:46.439671Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:46.439983Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:10374) connection closed with error: Connection refused 2026-01-07T17:42:46.442744Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:42:46.475916Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:46.873220Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:47.145136Z node 
3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:50.732776Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:50.753013Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:50.862933Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:50.863020Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:50.867478Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:9678) connection closed with error: Connection refused 2026-01-07T17:42:50.868576Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:50.877430Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592676560311491561:2081] 1767807770608351 != 1767807770608354 2026-01-07T17:42:50.882405Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:42:50.914838Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:51.031276Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:51.609557Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:55.335155Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592676579683344966:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:55.338203Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:55.416022Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:42:55.511698Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:55.550780Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:29063) connection closed with error: Connection refused 2026-01-07T17:42:55.554133Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:42:55.559693Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:55.559791Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:55.567586Z node 5 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:55.707697Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:42:56.339583Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:59.555031Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592676600637644296:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:42:59.557318Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:42:59.567327Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:59.681179Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592676600637644185:2081] 1767807779547822 != 1767807779547825 2026-01-07T17:42:59.688193Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:59.704233Z node 6 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:13304) connection closed with error: Connection refused 2026-01-07T17:42:59.705175Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#7205759403796889 ... t execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:46.213223Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:52.160573Z node 19 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7592676824647889085:2132];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:52.160618Z node 19 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:52.194804Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:52.355898Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:52.384995Z node 19 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [19:7592676824647888993:2081] 1767807832157544 != 1767807832157547 2026-01-07T17:43:52.398274Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:52.398368Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:52.415001Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-07T17:43:52.426230Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:21277) connection closed with error: Connection refused 2026-01-07T17:43:52.432175Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:43:52.433033Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:52.631461Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:53.181503Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:59.137342Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7592676855215332358:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:59.137404Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:59.169690Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:59.285033Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:59.511021Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:59.511149Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:59.512873Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:31542) connection closed with error: Connection refused 2026-01-07T17:43:59.513088Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:59.522009Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:43:59.552018Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:59.564175Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:59.856352Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:00.097725Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:05.009558Z node 21 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7592676881542994192:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:05.009638Z node 21 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:05.030454Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:05.129152Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [21:7592676881542994163:2081] 1767807845008464 != 1767807845008467 2026-01-07T17:44:05.138292Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:05.147748Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:05.147892Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:05.161691Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#34,[::1]:11083) connection closed with error: Connection refused 2026-01-07T17:44:05.172447Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:44:05.173263Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:05.293881Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:06.015522Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:10.884565Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7592676904403576378:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:10.884620Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:10.912732Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:11.006389Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:11.025124Z node 22 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [22:7592676904403576347:2081] 1767807850883055 != 1767807850883058 2026-01-07T17:44:11.025398Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#36,[::1]:8828) connection closed with error: Connection refused 2026-01-07T17:44:11.026573Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:44:11.027007Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-07T17:44:11.027112Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:11.061248Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:11.081928Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:11.890243Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:17.082606Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7592676934295184562:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:17.082937Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:17.109458Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:17.285050Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:17.294320Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:17.297259Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7592676934295184353:2081] 1767807857056252 != 1767807857056255 2026-01-07T17:44:17.313131Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:17.313338Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:17.351065Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#38,[::1]:3860) connection closed with error: Connection refused 2026-01-07T17:44:17.352414Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-07T17:44:17.354230Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:17.445190Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:18.079997Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |90.8%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2026-01-07T17:44:24.637354Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-07T17:44:24.637443Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-07T17:44:24.637486Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:111: ProcessQueue(), token sent: VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 0 waiting# 1 2026-01-07T17:44:24.637516Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:122: ProcessQueue() done: active# 1 waiting# 0 2026-01-07T17:44:24.637554Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-07T17:44:24.722156Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-07T17:44:24.722304Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-01-07T17:44:24.722360Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:161: TEvReleaseSyncToken, removed from queue: VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> BsControllerConfig::MergeBoxes [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] Test command err: 2026-01-07T17:44:21.233109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676949053353852:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:21.233188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:21.443224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:21.553982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:21.554118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:44:21.567400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:21.605810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:21.608000Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676949053353825:2081] 1767807861231460 != 1767807861231463 2026-01-07T17:44:21.637848Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-07T17:44:21.637946Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d60297003d0] Connect to grpc://localhost:1627 2026-01-07T17:44:21.642826Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d60297003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-07T17:44:21.653790Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d60297003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:44:21.654644Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:44:21.655921Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-01-07T17:44:21.705181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:23.617600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:23.617699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:44:23.617746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:23.617800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:23.617845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:23.617876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:23.617938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:23.618007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:23.618958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:23.619277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:23.689224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:23.689275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:23.704815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:23.705243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:23.705442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:23.715913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:23.716357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:23.717252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:23.717564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:23.721008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:23.721221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:23.722414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:23.722479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:23.722599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-07T17:44:23.722650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:23.722768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:23.722996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:23.847282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.848959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.849017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.849098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.849162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:23.849221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 110 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:24.837710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 110:0, at schemeshard: 72057594046678944 2026-01-07T17:44:24.837925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 110:1, propose status:StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", at schemeshard: 72057594046678944 2026-01-07T17:44:24.840615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 110, response: Status: StatusInvalidParameter Reason: "Last patrition 2 doesn\'t have the highest bound \"AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\"" TxId: 110 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:24.840883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 110, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2026-01-07T17:44:24.841285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2026-01-07T17:44:24.841332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2026-01-07T17:44:24.841811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-01-07T17:44:24.841924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-07T17:44:24.841971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:1013:2932] TestWaitNotification: OK eventTxId 110 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } TestModificationResults wait txId: 112 2026-01-07T17:44:24.845616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:24.845874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 112:0, at schemeshard: 72057594046678944 2026-01-07T17:44:24.846345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 112:1, propose status:StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, at schemeshard: 72057594046678944 2026-01-07T17:44:24.849096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 112, response: Status: StatusInvalidParameter Reason: "Only 1 root partitions has new bounds, required: 3" TxId: 112 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:24.849404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 112, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-07T17:44:24.849778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-07T17:44:24.849855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-07T17:44:24.850303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-07T17:44:24.850428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:44:24.850472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1020:2939] TestWaitNotification: OK eventTxId 112 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } TestModificationResults wait txId: 114 2026-01-07T17:44:24.854455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:24.854736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 114:0, at schemeshard: 72057594046678944 2026-01-07T17:44:24.854904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-07T17:44:24.857591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:24.857876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 114, wait until txId: 114 TestWaitNotification wait txId: 114 2026-01-07T17:44:24.858237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 114: send EvNotifyTxCompletion 2026-01-07T17:44:24.858298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 114 2026-01-07T17:44:24.858780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2026-01-07T17:44:24.858912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:44:24.858948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:1027:2946] TestWaitNotification: OK eventTxId 114 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } TestModificationResults wait txId: 116 2026-01-07T17:44:24.863143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } } } TxId: 116 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:24.863438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, 
pathId: , opId: 116:0, at schemeshard: 72057594046678944 2026-01-07T17:44:24.863643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 116:1, propose status:StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", at schemeshard: 72057594046678944 2026-01-07T17:44:24.866419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 116, response: Status: StatusInvalidParameter Reason: "Partitions 0 and 0 have overlapped bounds at point \"-inf\"" TxId: 116 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:24.866693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 116, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 116, wait until txId: 116 TestWaitNotification wait txId: 116 2026-01-07T17:44:24.867116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 116: send EvNotifyTxCompletion 2026-01-07T17:44:24.867165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 116 2026-01-07T17:44:24.867900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-07T17:44:24.868010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-07T17:44:24.868049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:1034:2953] TestWaitNotification: OK eventTxId 116 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2026-01-07T17:43:49.633362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676812637287191:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:49.633461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:50.080361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:43:50.141296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:50.141430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:50.226100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:50.233771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:50.320645Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:43:50.550164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:50.565780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:50.646330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:53.623842Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:53.643279Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=OWEwM2Q0MGYtMjYzNzUxYi1mYjU1YmI4Yy0xMGFmOWQxNw==, workerId: [1:7592676829817157150:2303], database: , longSession: 0, local sessions count: 1 2026-01-07T17:43:53.643605Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=OWEwM2Q0MGYtMjYzNzUxYi1mYjU1YmI4Yy0xMGFmOWQxNw==, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7592676829817157150:2303] 2026-01-07T17:43:53.643625Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2026-01-07T17:43:53.644133Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2718} SessionId: ydb://session/3?node_id=1&id=OWEwM2Q0MGYtMjYzNzUxYi1mYjU1YmI4Yy0xMGFmOWQxNw==, ActorId: [1:7592676829817157150:2303], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxy_request_id# 2 trace_id# 2026-01-07T17:43:53.644299Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:53.644326Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:53.644342Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:53.644463Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7592676816932255207:2482], selfId: [1:7592676812637287384:2266], source: [1:7592676829817157150:2303] 2026-01-07T17:43:53.647512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676829817157151:2304], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:53.647647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:53.649195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676829817157163:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:53.649302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:53.657243Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Handle TEvPrivate::TEvOnRequestTimeout(2) 2026-01-07T17:43:53.657269Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1215: Invalid request info while on request timeout handle. RequestId: 2 ... waiting for SysViewsRoster update finished 2026-01-07T17:44:01.831427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:01.850163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:01.852648Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:44:01.862528Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:44:01.865313Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:44:01.866119Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:44:01.866466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:44:01.866616Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:44:01.868367Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:44:01.868541Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:44:02.238569Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:02.345352Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:02.345486Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:02.346140Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:02.346198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:02.390758Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:44:02.391283Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:02.391699Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:02.462174Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:02.476290Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1488:2977] 2026-01-07T17:44:03.401393Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=YTA5N2I4MDktMTZjZWIyNWEtNzM4ZTA4YjgtNjM4N2ExNDQ=, workerId: [3:1489:2375], database: , longSession: 1, local sessions count: 1 2026-01-07T17:44:03.401708Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=YTA5N2I4MDktMTZjZWIyNWEtNzM4ZTA4YjgtNjM4N2ExNDQ= 2026-01-07T17:44:03.402496Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=YTA5N2I4MDktMTZjZWIyNWEtNzM4ZTA4YjgtNjM4N2ExNDQ=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2026-01-07T17:44:03.402607Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2026-01-07T17:44:03.403505Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=YTA5N2I4MDktMTZjZWIyNWEtNzM4ZTA4YjgtNjM4N2ExNDQ=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1489:2375] 2026-01-07T17:44:03.403557Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2026-01-07T17:44:03.692450Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1490:2978], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:03.692592Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:03.693212Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_acto ... 2026-01-07T17:44:22.680629Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=OGZjZjRhNTItYWVhMGEwOTMtNzNmZjlmMjAtZWMzNGZiMjg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 50, targetId: [7:7592676955819767892:2574] 2026-01-07T17:44:22.680670Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 50 timeout: 300.000000s actor id: [7:7592676955819767913:3069] 2026-01-07T17:44:22.690460Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 50, sender: [7:7592676955819767912:2581], selfId: [7:7592676912870093043:2083], source: [7:7592676955819767892:2574] 2026-01-07T17:44:22.690794Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7592676955819767889:2571], ActorId: [7:7592676955819767890:2572], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=7&id=OGZjZjRhNTItYWVhMGEwOTMtNzNmZjlmMjAtZWMzNGZiMjg=, TxId: 2026-01-07T17:44:22.690899Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7592676955819767889:2571], ActorId: [7:7592676955819767890:2572], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=OGZjZjRhNTItYWVhMGEwOTMtNzNmZjlmMjAtZWMzNGZiMjg=, TxId: 2026-01-07T17:44:22.690952Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7592676955819767889:2571], ActorId: [7:7592676955819767890:2572], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-07T17:44:22.691064Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [7:7592676955819767888:2570], ActorId: [7:7592676955819767889:2571], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7592676955819767890:2572] SUCCESS 2026-01-07T17:44:22.691299Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=OGZjZjRhNTItYWVhMGEwOTMtNzNmZjlmMjAtZWMzNGZiMjg=, workerId: [7:7592676955819767892:2574], local sessions count: 1 2026-01-07T17:44:22.691410Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7592676951524800547:3041] ActorId: [7:7592676951524800548:3042] Database: /dc-1 ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-07T17:44:22.691467Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7592676951524800547:3041] ActorId: [7:7592676951524800548:3042] Database: /dc-1 ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590. Reply success 2026-01-07T17:44:22.704685Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecryhxg542a5qwaerze5nf4, Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=YWYyMTUwZGYtYWYwZTE2YjEtM2FjZjFmNTUtZGFjZjA1Zg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 51, targetId: [7:7592676947229833126:2531] 2026-01-07T17:44:22.704754Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 51 timeout: 300.000000s actor id: [7:7592676955819767937:3077] 2026-01-07T17:44:23.583588Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecryhxg542a5qwaerze5nf4", Forwarded response to sender actor, requestId: 51, sender: [7:7592676955819767936:2586], selfId: [7:7592676912870093043:2083], source: [7:7592676947229833126:2531] 2026-01-07T17:44:23.585590Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:833: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7592676960114735271:3094] ActorId: [7:7592676960114735272:3095] Database: /dc-1 ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590. Bootstrap. Start TLeaseUpdateRetryActor [7:7592676960114735273:3096] 2026-01-07T17:44:23.585669Z node 7 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735272:3095], ActorId: [7:7592676960114735273:3096], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [7:7592676960114735274:3097] 2026-01-07T17:44:23.585700Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2026-01-07T17:44:23.585862Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976265845.965766s seconds to be completed 2026-01-07T17:44:23.587549Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, workerId: [7:7592676960114735276:2600], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T17:44:23.587715Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T17:44:23.587929Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:695: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2026-01-07T17:44:23.588048Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:23.588367Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 53, targetId: [7:7592676960114735276:2600] 2026-01-07T17:44:23.588399Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 53 timeout: 300.000000s actor id: [7:7592676960114735278:3098] 2026-01-07T17:44:23.852666Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 53, sender: [7:7592676960114735277:2601], selfId: [7:7592676912870093043:2083], source: [7:7592676960114735276:2600] 2026-01-07T17:44:23.852879Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, TxId: 01kecryk184eeemhqx36s874v7 2026-01-07T17:44:23.853078Z node 7 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, TxId: 01kecryk184eeemhqx36s874v7 2026-01-07T17:44:23.853122Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:432: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01kecryk184eeemhqx36s874v7 in session: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk= 2026-01-07T17:44:23.853202Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735272:3095], ActorId: [7:7592676960114735273:3096], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7592676960114735274:3097] NOT_FOUND 2026-01-07T17:44:23.853310Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:843: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7592676960114735271:3094] ActorId: [7:7592676960114735272:3095] Database: /dc-1 ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590. Lease update [7:7592676960114735274:3097] finished NOT_FOUND, issues: {
: Error: No such execution } 2026-01-07T17:44:23.853368Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 54, targetId: [7:7592676960114735276:2600] 2026-01-07T17:44:23.853392Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 54 timeout: 600.000000s actor id: [7:7592676960114735300:3108] 2026-01-07T17:44:23.854000Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 54, sender: [7:7592676960114735299:2608], selfId: [7:7592676912870093043:2083], source: [7:7592676960114735276:2600] 2026-01-07T17:44:23.854093Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:441: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7592676960114735273:3096], ActorId: [7:7592676960114735274:3097], TraceId: ExecutionId: 432bb358-64f58784-ca7b39d2-6da79590, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2026-01-07T17:44:23.854251Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=NDllZDViOGItM2U4MzkyZDAtMmZiMzZhOGItMzA0NjNkNTk=, workerId: [7:7592676960114735276:2600], local sessions count: 1 2026-01-07T17:44:23.862796Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=YWYyMTUwZGYtYWYwZTE2YjEtM2FjZjFmNTUtZGFjZjA1Zg==, workerId: [7:7592676947229833126:2531], local sessions count: 0 >> TestSqsTopicHttpProxy::TestDeleteMessageBatch |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10935:2167] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10935:2167] Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11117:2156] recipient: [1:10935:2167] 2026-01-07T17:43:08.109734Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:08.110770Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:08.111061Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:43:08.112772Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:08.113109Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:43:08.113345Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:08.113362Z node 1 :BS_CONTROLLER DEBUG: 
{BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:43:08.113660Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:43:08.120369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:43:08.120487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:43:08.120632Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:43:08.120712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:08.120772Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:43:08.120832Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11138:2156] recipient: [1:110:2157] 2026-01-07T17:43:08.133368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:43:08.133463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:08.182332Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:43:08.182455Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:08.182535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:43:08.182598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:08.182691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:43:08.182741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:08.182773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:43:08.182814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:08.193575Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:43:08.193743Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:08.204493Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:43:08.204601Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:43:08.205626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:43:08.205662Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:43:08.205858Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:43:08.205905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:43:08.219272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { 
Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... } Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2026-01-07T17:44:16.122280Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2026-01-07T17:44:16.122309Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2026-01-07T17:44:16.122336Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2026-01-07T17:44:16.122378Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2026-01-07T17:44:16.122409Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2026-01-07T17:44:16.122434Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2026-01-07T17:44:16.122461Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2026-01-07T17:44:16.122498Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2026-01-07T17:44:16.122531Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2026-01-07T17:44:16.122558Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2026-01-07T17:44:16.122585Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2026-01-07T17:44:16.122611Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2026-01-07T17:44:16.122637Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2026-01-07T17:44:16.122681Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2026-01-07T17:44:16.122722Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2026-01-07T17:44:16.122774Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2026-01-07T17:44:16.122802Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2026-01-07T17:44:16.122830Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2026-01-07T17:44:16.122870Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2026-01-07T17:44:16.122899Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2026-01-07T17:44:16.122926Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2026-01-07T17:44:16.122952Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2026-01-07T17:44:16.122980Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1001 Path# 
/dev/disk2 2026-01-07T17:44:16.123007Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2026-01-07T17:44:16.123035Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2026-01-07T17:44:16.123063Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2026-01-07T17:44:16.123089Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2026-01-07T17:44:16.123115Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2026-01-07T17:44:16.123141Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2026-01-07T17:44:16.123168Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2026-01-07T17:44:16.123194Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2026-01-07T17:44:16.123219Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2026-01-07T17:44:16.123243Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2026-01-07T17:44:16.123271Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2026-01-07T17:44:16.123296Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2026-01-07T17:44:16.123348Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2026-01-07T17:44:16.123378Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2026-01-07T17:44:16.123404Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2026-01-07T17:44:16.123430Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2026-01-07T17:44:16.123455Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2026-01-07T17:44:16.123482Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2026-01-07T17:44:16.123508Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2026-01-07T17:44:16.123556Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2026-01-07T17:44:16.123590Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2026-01-07T17:44:16.123632Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2026-01-07T17:44:16.123660Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2026-01-07T17:44:16.123686Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1001 Path# 
/dev/disk2 2026-01-07T17:44:16.123711Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2026-01-07T17:44:16.123751Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2026-01-07T17:44:16.123794Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2026-01-07T17:44:16.123821Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2026-01-07T17:44:16.123874Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2026-01-07T17:44:16.123912Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2026-01-07T17:44:16.123957Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2026-01-07T17:44:16.123998Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2026-01-07T17:44:16.124038Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2026-01-07T17:44:16.124066Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2026-01-07T17:44:16.124095Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2026-01-07T17:44:16.124141Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2026-01-07T17:44:16.124171Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2026-01-07T17:44:16.124213Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2026-01-07T17:44:16.124270Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2026-01-07T17:44:16.124313Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2026-01-07T17:44:16.124356Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2026-01-07T17:44:16.124388Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2026-01-07T17:44:16.124419Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2026-01-07T17:44:16.124445Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2026-01-07T17:44:16.124495Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2026-01-07T17:44:16.124543Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2026-01-07T17:44:16.124582Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2026-01-07T17:44:16.124611Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1001 Path# 
/dev/disk2 2026-01-07T17:44:16.124636Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2026-01-07T17:44:16.124661Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2026-01-07T17:44:16.124688Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2026-01-07T17:44:16.124714Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2026-01-07T17:44:16.418048Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.298228s 2026-01-07T17:44:16.418248Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.298452s 2026-01-07T17:44:16.462447Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2026-01-07T17:44:16.486587Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> TSyncNeighborsTests::SerDes2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSyncBrokerTests::ShouldEnqueue >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty [GOOD] >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution >> TSyncBrokerTests::ShouldReleaseToken [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2026-01-07T17:44:27.805399Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-07T17:44:27.805500Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-07T17:44:27.929134Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-07T17:44:27.929261Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# 
[2:6:2053] active# 1 waiting# 1 2026-01-07T17:44:27.929322Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:82: TEvQuerySyncToken, enqueued (1): VDisk actor id# [0:1:2] actor id# [2:7:2054] active# 1 waiting# 1 >> TestSqsTopicHttpProxy::TestReceiveMessageGroup |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2026-01-07T17:44:28.021982Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-07T17:44:28.022110Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:51: TEvQuerySyncToken, token sent (1): VDisk actor id# [0:1:1] actor id# [1:6:2053] active# 1 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2026-01-07T17:44:27.966631Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-07T17:44:28.076377Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-07T17:44:28.076540Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 0 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] |90.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] Test command err: 2026-01-07T17:43:48.165597Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676811160049023:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:48.165659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:48.438911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:48.438992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:48.615703Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676811160048986:2081] 1767807828155340 != 1767807828155343 2026-01-07T17:43:48.617176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:48.618057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:48.955265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:48.955278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:48.955285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:48.955328Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:49.005168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:49.185459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:52.130781Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:52.145279Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:52.145338Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:52.145364Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:52.147590Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Describe result: PathErrorUnknown 2026-01-07T17:43:52.147612Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Creating table 2026-01-07T17:43:52.147670Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. 
SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:52.147793Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Describe result: PathErrorUnknown 2026-01-07T17:43:52.147800Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Creating table 2026-01-07T17:43:52.147826Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:52.148499Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Describe result: PathErrorUnknown 2026-01-07T17:43:52.148506Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Creating table 2026-01-07T17:43:52.148533Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:52.154620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:52.161597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:52.164059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:52.172741Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:52.172820Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:52.173126Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:52.173157Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:52.173229Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:52.173253Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:52.176822Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:52.178499Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:52.178672Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:52.343224Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:52.413345Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:52.413431Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-07T17:43:52.422893Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-07T17:43:52.422945Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Column diff is empty, finishing 2026-01-07T17:43:52.424100Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:52.425258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:52.427059Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:52.427073Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7592676828339918996:2489] Owner: [1:7592676828339918994:2487]. Successful alter request: ExecComplete 2026-01-07T17:43:52.509897Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:52.509942Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Column diff is empty, finishing 2026-01-07T17:43:52.510023Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676828339918997:2490] Owner: [1:7592676828339918994:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:52.511086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:52.511733Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676828339918995:2488] Owner: [1:7592676828339918994:24 ... ata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:26.510498Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YWMwYTY5MzItNzg1NDRkYTgtN2UyNWVkZDctODU4YjJlNWE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 91, targetId: [3:7592676971631836772:2705] 2026-01-07T17:44:26.510538Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 91 timeout: 300.000000s actor id: [3:7592676971631836774:3275] 2026-01-07T17:44:26.522072Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 91, sender: [3:7592676971631836773:2706], selfId: [3:7592676920092226955:2186], source: [3:7592676971631836772:2705] 2026-01-07T17:44:26.522688Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676971631836769:3273], ActorId: [3:7592676971631836770:3274], TraceId: ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YWMwYTY5MzItNzg1NDRkYTgtN2UyNWVkZDctODU4YjJlNWE=, TxId: 2026-01-07T17:44:26.523385Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676971631836769:3273], ActorId: [3:7592676971631836770:3274], TraceId: ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YWMwYTY5MzItNzg1NDRkYTgtN2UyNWVkZDctODU4YjJlNWE=, TxId: 2026-01-07T17:44:26.523420Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676971631836769:3273], ActorId: [3:7592676971631836770:3274], TraceId: ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-07T17:44:26.523545Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7592676971631836768:3272], ActorId: [3:7592676971631836769:3273], TraceId: ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61, RequestDatabase: /dc-1, Got response [3:7592676971631836770:3274] SUCCESS 2026-01-07T17:44:26.523599Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676971631836767:3271] ActorId: [3:7592676971631836768:3272] Database: /dc-1 ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61. Extracted script execution operation [3:7592676971631836770:3274], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7592676967336869278:3201], LeaseGeneration: 0 2026-01-07T17:44:26.523626Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7592676971631836767:3271] ActorId: [3:7592676971631836768:3272] Database: /dc-1 ExecutionId: 9829a9cc-1d3b521c-d4464db-b6eaea61. Reply success 2026-01-07T17:44:26.524777Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YWMwYTY5MzItNzg1NDRkYTgtN2UyNWVkZDctODU4YjJlNWE=, workerId: [3:7592676971631836772:2705], local sessions count: 1 2026-01-07T17:44:27.546267Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrypms3q3dpgrktw7jdmj7, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 92, targetId: [3:7592676950156999620:2537] 2026-01-07T17:44:27.546320Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 92 timeout: 300.000000s actor id: [3:7592676975926804098:3286] 2026-01-07T17:44:27.574725Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804102:3288], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.574765Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804102:3288], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.577417Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592676975926804099:2716], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:27.577877Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, ActorId: [3:7592676950156999620:2537], ActorState: ExecuteState, LegacyTraceId: 01kecrypms3q3dpgrktw7jdmj7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:27.578053Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrypms3q3dpgrktw7jdmj7", Forwarded response to sender actor, requestId: 92, sender: [3:7592676975926804097:2715], selfId: [3:7592676920092226955:2186], source: [3:7592676950156999620:2537] 2026-01-07T17:44:27.599451Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecryppe1j4b557w31k0ej68, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 93, targetId: [3:7592676950156999620:2537] 2026-01-07T17:44:27.599503Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 93 timeout: 300.000000s actor id: [3:7592676975926804105:3289] 2026-01-07T17:44:27.619758Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804109:3291], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.619791Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804109:3291], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.622282Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592676975926804106:2719], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_execution_leases]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:27.623956Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, ActorId: [3:7592676950156999620:2537], ActorState: ExecuteState, LegacyTraceId: 01kecryppe1j4b557w31k0ej68, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_execution_leases]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:27.624126Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecryppe1j4b557w31k0ej68", Forwarded response to sender actor, requestId: 93, sender: [3:7592676975926804104:2718], selfId: [3:7592676920092226955:2186], source: [3:7592676950156999620:2537] 2026-01-07T17:44:27.638023Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrypqn4dk3z7xaj0nyy387, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 94, targetId: [3:7592676950156999620:2537] 2026-01-07T17:44:27.638074Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 94 timeout: 300.000000s actor id: [3:7592676975926804112:3292] 2026-01-07T17:44:27.661498Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804116:3294], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.661527Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7592676975926804116:3294], for# root@builtin, access# DescribeSchema 2026-01-07T17:44:27.664065Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592676975926804113:2722], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/result_sets]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:27.667000Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, ActorId: [3:7592676950156999620:2537], ActorState: ExecuteState, LegacyTraceId: 01kecrypqn4dk3z7xaj0nyy387, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/result_sets]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:27.667590Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrypqn4dk3z7xaj0nyy387", Forwarded response to sender actor, requestId: 94, sender: [3:7592676975926804111:2721], selfId: [3:7592676920092226955:2186], source: [3:7592676950156999620:2537] 2026-01-07T17:44:27.693670Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjBlZjkwYzctZjU3MjE5ZjEtOWI1NjIyNTctYjY3NzM3OTc=, workerId: [3:7592676950156999620:2537], local sessions count: 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] Test command err: 2026-01-07T17:44:15.043491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676926580571982:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:15.055763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:15.359147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:15.359286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:15.375534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:15.380952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:15.471471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:15.473608Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676922285604495:2081] 1767807854991534 != 1767807854991537 2026-01-07T17:44:15.521829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:15.521853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-07T17:44:15.521859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:15.521946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:15.539295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:15.708229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:15.908087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:15.914172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:44:15.916786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:16.028503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.065416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:16.137153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.184612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.226445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:44:16.233806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.273294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.314958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.354632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.390863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.432821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.465638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:18.437012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676939465475346:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:18.437013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676939465475334:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:18.437083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:18.438504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676939465475349:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:18.438599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:18.441436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:18.468009Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676939465475348:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:18.550597Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676939465475401:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:18.973226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.046120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.094754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.155866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.241286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... p_proxy_incoming.cpp:97: (#40,[::1]:38516) incoming connection opened 2026-01-07T17:44:28.261528Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:38516) -> (POST /Root, 100 bytes) 2026-01-07T17:44:28.261658Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [98d5:a412:1a7c:0:80d5:a412:1a7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 3c58e3fa-7f1a866d-4edf3623-5479e079 2026-01-07T17:44:28.261984Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [3c58e3fa-7f1a866d-4edf3623-5479e079] got new request from [98d5:a412:1a7c:0:80d5:a412:1a7c:0] database '/Root' stream '' 2026-01-07T17:44:28.262607Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [3c58e3fa-7f1a866d-4edf3623-5479e079] [auth] Authorized successfully 2026-01-07T17:44:28.262637Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [3c58e3fa-7f1a866d-4edf3623-5479e079] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:28.268377Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [3c58e3fa-7f1a866d-4edf3623-5479e079] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-07T17:44:28.268512Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [3c58e3fa-7f1a866d-4edf3623-5479e079] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2026-01-07T17:44:28.270179Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:38516) <- (400 InvalidParameterValue, 81 bytes) 2026-01-07T17:44:28.270244Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#40,[::1]:38516) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":0, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-07T17:44:28.270437Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#40,[::1]:38516) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 3c58e3fa-7f1a866d-4edf3623-5479e079 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-07T17:44:28.270551Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:38516) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-07T17:44:28.277247Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38532) incoming connection opened 2026-01-07T17:44:28.277310Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38532) -> (POST /Root, 101 bytes) 2026-01-07T17:44:28.277474Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f818:a712:1a7c:0:e018:a712:1a7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: a549222-ada8e00a-327038eb-76fc2a30 2026-01-07T17:44:28.277825Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [a549222-ada8e00a-327038eb-76fc2a30] got new request from [f818:a712:1a7c:0:e018:a712:1a7c:0] database '/Root' stream '' 2026-01-07T17:44:28.281429Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [a549222-ada8e00a-327038eb-76fc2a30] [auth] Authorized successfully 2026-01-07T17:44:28.281486Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [a549222-ada8e00a-327038eb-76fc2a30] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-07T17:44:28.282047Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [a549222-ada8e00a-327038eb-76fc2a30] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-07T17:44:28.282123Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [a549222-ada8e00a-327038eb-76fc2a30] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-07T17:44:28.282349Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38532) <- (400 InvalidParameterValue, 85 bytes) 2026-01-07T17:44:28.282405Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:38532) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":50, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-07T17:44:28.282448Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:38532) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: a549222-ada8e00a-327038eb-76fc2a30 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2026-01-07T17:44:28.282555Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38532) connection closed 2026-01-07T17:44:28.289123Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:38548) incoming connection opened 2026-01-07T17:44:28.289219Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:38548) -> (POST /Root, 109 bytes) 2026-01-07T17:44:28.289407Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [9866:a712:1a7c:0:8066:a712:1a7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: a711203b-e627ddd2-25dc1f7c-6cf6b171 2026-01-07T17:44:28.289798Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [a711203b-e627ddd2-25dc1f7c-6cf6b171] got new request from [9866:a712:1a7c:0:8066:a712:1a7c:0] database '/Root' stream '' 2026-01-07T17:44:28.290727Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [a711203b-e627ddd2-25dc1f7c-6cf6b171] [auth] Authorized successfully 2026-01-07T17:44:28.290768Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [a711203b-e627ddd2-25dc1f7c-6cf6b171] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-07T17:44:28.291173Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [a711203b-e627ddd2-25dc1f7c-6cf6b171] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-07T17:44:28.291247Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [a711203b-e627ddd2-25dc1f7c-6cf6b171] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-07T17:44:28.291455Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:38548) <- (400 InvalidParameterValue, 85 bytes) 2026-01-07T17:44:28.291501Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#40,[::1]:38548) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":2147483647, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-07T17:44:28.291525Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#40,[::1]:38548) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: a711203b-e627ddd2-25dc1f7c-6cf6b171 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2026-01-07T17:44:28.291638Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:38548) connection closed 2026-01-07T17:44:28.297270Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38564) incoming connection opened 2026-01-07T17:44:28.297362Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38564) -> (POST /Root, 110 bytes) 2026-01-07T17:44:28.297489Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7817:a712:1a7c:0:6017:a712:1a7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 13cd3ee4-de61fd2e-ba6ded14-e02dd18b 2026-01-07T17:44:28.297853Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [13cd3ee4-de61fd2e-ba6ded14-e02dd18b] got new request from [7817:a712:1a7c:0:6017:a712:1a7c:0] database '/Root' stream '' 2026-01-07T17:44:28.298594Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [13cd3ee4-de61fd2e-ba6ded14-e02dd18b] [auth] Authorized successfully 2026-01-07T17:44:28.298632Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [13cd3ee4-de61fd2e-ba6ded14-e02dd18b] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-07T17:44:28.299032Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [13cd3ee4-de61fd2e-ba6ded14-e02dd18b] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-07T17:44:28.299090Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [13cd3ee4-de61fd2e-ba6ded14-e02dd18b] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2026-01-07T17:44:28.299345Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38564) <- (400 InvalidParameterValue, 81 bytes) 2026-01-07T17:44:28.299385Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:38564) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":-2147483648, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-07T17:44:28.299409Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:38564) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 13cd3ee4-de61fd2e-ba6ded14-e02dd18b Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-07T17:44:28.299485Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38564) connection closed 2026-01-07T17:44:28.323482Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:28.323520Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:28.323530Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:28.323545Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:28.323556Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:28.423656Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:28.423688Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:28.423701Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:28.423720Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:28.423732Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition [GOOD] >> 
TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] Test command err: 2026-01-07T17:44:15.559586Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676926080964909:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:15.559689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:15.794963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:15.796829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:15.797025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:15.801361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:15.895080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676926080964878:2081] 1767807855557419 != 1767807855557422 2026-01-07T17:44:15.921513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:16.006780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:16.006803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:16.006819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:16.006914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:16.097066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:16.241995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:16.448972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:16.457053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:16.481135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:44:16.565954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.575876Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:16.744607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.836983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.890879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-07T17:44:16.896915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.961049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:16.998800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:17.056645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:17.143908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:17.224913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:17.289648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.314736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676943260835722:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:19.314876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:19.315331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676943260835734:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:19.315366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676943260835735:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:19.315393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:19.319952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:19.336728Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676943260835738:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:19.426035Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676943260835789:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:19.876124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.937571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:19.994855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.049816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.105715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 30.435235Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:30.435470Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:44:30.436948Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:44:30.437120Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-07T17:44:30.437134Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-07T17:44:30.437227Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:44:30.437249Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:30.437284Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.437296Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:30.437310Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.437321Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:30.437343Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:44:30.437716Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-07T17:44:30.437783Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-07T17:44:30.437904Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:44:30.441188Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:15450 { status: SUCCESS, issues: }consumer 2026-01-07T17:44:30.472654Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:41948) incoming connection opened 2026-01-07T17:44:30.473821Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:41948) -> (POST /Root, 1406 bytes) 
2026-01-07T17:44:30.473988Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d868:bea5:37c:0:c068:bea5:37c:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 6664d1f2-f3e0004e-3de95434-1d3bb737 2026-01-07T17:44:30.475867Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [6664d1f2-f3e0004e-3de95434-1d3bb737] got new request from [d868:bea5:37c:0:c068:bea5:37c:0] database '/Root' stream '' 2026-01-07T17:44:30.477169Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-07T17:44:30.477255Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-07T17:44:30.477284Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-07T17:44:30.477310Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-07T17:44:30.477338Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-07T17:44:30.477363Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-07T17:44:30.477386Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-07T17:44:30.504104Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.504173Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.506838Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-07T17:44:30.506923Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.506980Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.509629Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.509687Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:30.509773Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-07T17:44:30.513115Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [6664d1f2-f3e0004e-3de95434-1d3bb737] [auth] Authorized successfully 2026-01-07T17:44:30.513201Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [6664d1f2-f3e0004e-3de95434-1d3bb737] sending grpc request to '' database: '/Root' iam token size: 0 Http output 
full {"__type":"AWS.SimpleQueueService.TooManyEntriesInBatchRequest","message":"The batch request contains more entries than permissible."} 2026-01-07T17:44:30.519150Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [6664d1f2-f3e0004e-3de95434-1d3bb737] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.TooManyEntriesInBatchRequest 2026-01-07T17:44:30.519274Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [6664d1f2-f3e0004e-3de95434-1d3bb737] reply with status: STATUS_UNDEFINED message: The batch request contains more entries than permissible. 2026-01-07T17:44:30.519563Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:41948) <- (400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest, 134 bytes) 2026-01-07T17:44:30.519630Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#38,[::1]:41948) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ { "Id":"Id-1", "MessageGroupId":"MessageGroupId-1", "MessageBody":"MessageBody-1" }, { "Id":"Id-2", "MessageGroupId":"MessageGroupId-2", "MessageBody":"MessageBody-2" }, { "Id":"Id-3", "MessageGroupId":"MessageGroupId-3", "MessageBody":"MessageBody-3" }, { "Id":"Id-4", "MessageGroupId":"MessageGroupId-4", "MessageBody":"MessageBody-4" }, { "Id":"Id-5", "MessageGroupId":"MessageGroupId-5", "MessageBody":"MessageBody-5" }, { "Id":"Id-6", "MessageGroupId":"MessageGroupId-6", "MessageBody":"MessageBody-6" 2026-01-07T17:44:30.519664Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#38,[::1]:41948) Response: HTTP/1.1 400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest Connection: close x-amzn-requestid: 6664d1f2-f3e0004e-3de95434-1d3bb737 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-07T17:44:30.519801Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:41948) connection closed 2026-01-07T17:44:30.531741Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:30.531771Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.531784Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:30.531799Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.531810Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:30.632338Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:30.632369Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.632388Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:30.632418Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.632431Z 
node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:30.734175Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:30.734208Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.734221Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:30.734239Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:30.734251Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:30.096075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:30.096178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.096217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:30.096255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:30.096290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:30.096319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:30.096372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.096481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:30.097436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:30.097713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:30.193534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:44:30.193597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:30.208999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:30.209357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:30.209535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:30.216232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:30.216576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:30.217274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:30.217527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:30.220542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.220847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:30.222182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:30.222240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.222369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:30.222426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:30.222545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:30.222840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:30.367206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.368995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.369098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.369181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.369266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.369372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:44:31.641897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:44:31.641932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:44:31.641963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:44:31.641987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:44:31.642011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:44:31.642218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.642334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.642463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:44:31.642517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:44:31.642551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:31.642573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:44:31.642609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:44:31.642794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.642882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.643135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-07T17:44:31.643383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:44:31.643789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.643915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.644400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.644483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read 
records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.644798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.645030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.645093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.645216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.645441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.645529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.646717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.651730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:31.663288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:31.663519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:31.664587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:31.664829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:31.665025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:31.668573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1072:2973] sender: [1:1132:2058] recipient: [1:15:2062] 2026-01-07T17:44:31.718637Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:31.719284Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 752us result status StatusSuccess 2026-01-07T17:44:31.720536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active 
ParentPartitionIds: 0 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TestSetCloudPermissions::CanSetAllPermissions >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:30.456310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:30.456403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.456450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:30.456501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:30.456543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:30.456572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2026-01-07T17:44:30.456635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.456711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:30.457680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:30.457978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:30.555305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:30.555358Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:30.574065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:30.574490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:30.574695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:30.581988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:30.582363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:30.583203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:30.583490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:30.586387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.586567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:30.587812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:30.587878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.588005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:30.588061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:30.588203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:30.588405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:30.754400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.759481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.759707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.759823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.759915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.760817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944 2026-01-07T17:44:32.089172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.089290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.089372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.089528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.089594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.089646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.095409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:32.101323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.101412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.102273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:32.102359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:32.102416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:32.103900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1128:3033] sender: [1:1188:2058] recipient: [1:15:2062] 2026-01-07T17:44:32.190146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:32.190460Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 347us result status StatusSuccess 2026-01-07T17:44:32.191221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 
PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 109 2026-01-07T17:44:32.196982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-01-07T17:44:32.197280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.197431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-07T17:44:32.200716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:32.201070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-07T17:44:32.201480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-07T17:44:32.201528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-07T17:44:32.201971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-07T17:44:32.202082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-07T17:44:32.202120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:1193:3085] TestWaitNotification: OK eventTxId 109 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 111 2026-01-07T17:44:32.205750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:32.205981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 111:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.206178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:32.208400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: Status: StatusInvalidParameter Reason: "Unable to change bounds of 
non-root partition: 1" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:32.208764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 111, wait until txId: 111 TestWaitNotification wait txId: 111 2026-01-07T17:44:32.209151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 111: send EvNotifyTxCompletion 2026-01-07T17:44:32.209194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 111 2026-01-07T17:44:32.209641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-01-07T17:44:32.209719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-07T17:44:32.209753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:1200:3092] TestWaitNotification: OK eventTxId 111 |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageBatch [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:29.847345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:29.847463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:29.847503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:29.847555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:29.847600Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:29.847626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:29.847679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:29.847747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:29.848624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:29.853318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:30.037811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:30.037878Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:30.054707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:30.055118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:30.055357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:30.062657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:30.063070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:30.063850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:30.064165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:30.067782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.068029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:30.069349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:30.069415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.069538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:30.069586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:30.069698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:44:30.069902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:30.248356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.252888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.253994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.254124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.254215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... PQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.959513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:44:32.959694Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:44:32.959877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:44:32.959945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:32.963537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.963926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.963978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:44:32.964200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:44:32.964413Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.964459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:44:32.964507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:44:32.965119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.965171Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:44:32.965311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:44:32.965355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:44:32.965402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:44:32.965442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 
2026-01-07T17:44:32.965481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:44:32.965523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:44:32.965566Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:44:32.965597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:44:32.965747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:44:32.965790Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2026-01-07T17:44:32.965826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:44:32.965863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:44:32.967352Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:32.967468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:32.967522Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:44:32.967564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:44:32.967606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:44:32.968410Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:32.968492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:32.968524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:44:32.968555Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:44:32.968585Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:32.968656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2026-01-07T17:44:32.968704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:822:2788] 2026-01-07T17:44:32.971546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:44:32.971842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:44:32.971921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:44:32.971961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:958:2893] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } TestModificationResults wait txId: 105 2026-01-07T17:44:32.975233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:32.975534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.975782Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:32.978054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:32.978273Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:44:32.978571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:44:32.978612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:44:32.978943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:44:32.979037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:44:32.979065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1002:2929] TestWaitNotification: OK eventTxId 105 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:31.664532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:31.664623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.664666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:31.664709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:44:31.664747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:31.664785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:31.664836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.664917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:31.665754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:31.666035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:31.748882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:31.748921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:31.761930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:31.762287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:31.762497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:31.768754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:31.769034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:31.769675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:31.769925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:31.772199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:31.772344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:31.773159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:31.773198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:31.773275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:31.773307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:31.773452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:31.773579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.908563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.909964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.910545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2026-01-07T17:44:33.201334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2026-01-07T17:44:33.201360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2026-01-07T17:44:33.201382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-07T17:44:33.201406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-07T17:44:33.201436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-07T17:44:33.201469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-07T17:44:33.201507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-07T17:44:33.201543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-07T17:44:33.201569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-07T17:44:33.201599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-07T17:44:33.201624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-07T17:44:33.201652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-07T17:44:33.201675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-07T17:44:33.201700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-07T17:44:33.201725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-07T17:44:33.201765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-07T17:44:33.201792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-07T17:44:33.201826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-07T17:44:33.201856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-07T17:44:33.201878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-07T17:44:33.201902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-07T17:44:33.201925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-07T17:44:33.201948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:44:33.201978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:44:33.202003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:44:33.202025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:44:33.202056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:44:33.202080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:44:33.202122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:44:33.202158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:44:33.202195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:44:33.202233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:44:33.202469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.202633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.202770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:44:33.202829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:44:33.202862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:33.202892Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:44:33.202925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:44:33.203069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.203154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.203396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-07T17:44:33.203661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:44:33.204113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.204262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.204797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.204911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.205290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.205513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.205585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.205687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.205939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.206033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.206608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.207002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.207092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.207161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.207343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 
2026-01-07T17:44:33.207415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.207478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.213946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:33.222012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:33.222138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:33.222600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:33.222679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:33.222757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:33.222858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:32.122245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:32.122345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:32.122396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:32.122443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:32.122489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:32.122519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:32.122586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:32.122660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:32.123718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:32.123998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:32.221590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:32.221654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:32.238225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:32.238585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:32.238800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:32.246717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:32.247120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:32.247915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:32.248170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:32.251084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.251276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:32.252499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.252556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.252699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:32.252748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:32.252905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:32.253170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.386115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.387959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.388021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.388093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.388162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.388256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.388347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:44:33.479482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-07T17:44:33.479553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.479612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:44:33.479783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:44:33.479984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:33.485648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.486063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:33.486138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:44:33.486491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:33.486558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:44:33.487008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.487068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:44:33.487167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:44:33.487223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:44:33.487267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:44:33.487308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:44:33.487349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:44:33.487387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:44:33.487432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:44:33.487472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:44:33.487621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:44:33.487662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-07T17:44:33.487696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-07T17:44:33.488439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:44:33.488545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:44:33.488587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:44:33.488629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-07T17:44:33.488681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:33.488789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-07T17:44:33.488832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:810:2776] 2026-01-07T17:44:33.493351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:44:33.493521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:44:33.493564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1021:2944] TestWaitNotification: OK eventTxId 105 2026-01-07T17:44:33.494201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:33.494510Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_1/Topic1" took 330us result status StatusSuccess 2026-01-07T17:44:33.495442Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 2 ChildPartitionIds: 3 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 4 ChildPartitionIds: 5 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { ToBound: "?" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "?" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\277" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "?" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "?" 
ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\277" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:32.615986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:32.616095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:32.616148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:32.616200Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:32.616246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:32.616278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:32.616341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:32.616420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:32.620032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:32.620329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:32.701488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:32.701534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:32.714319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:32.714598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:32.714776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:32.720512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:32.720860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:32.721629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:32.721875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:32.724491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.724675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:32.725793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.725852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.725981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:32.726025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:32.726124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:32.726285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.847854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.851790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.851936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.851993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.852708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.871680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:44:33.871873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:44:33.872033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:44:33.872098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:33.875463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:44:33.876638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:44:33.876842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:44:33.876919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:44:33.877438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.877499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:44:33.877604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:44:33.877655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:44:33.877706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 
2026-01-07T17:44:33.877739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:44:33.877775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:44:33.877834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:44:33.877890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:44:33.877929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:44:33.878081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:44:33.878121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:44:33.878153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:44:33.878200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:44:33.880403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:33.880510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:33.880540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:44:33.880581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:44:33.880616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:44:33.881396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:33.881468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:44:33.881511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:44:33.881548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:44:33.881592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:33.881684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:44:33.885340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:44:33.885648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:44:33.886056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:44:33.886107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:44:33.886518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:44:33.886614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:44:33.886653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:992:2911] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } TestModificationResults wait txId: 106 2026-01-07T17:44:33.891136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:44:33.891500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.891673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose 
status:StatusInvalidParameter, reason: Split and merge operations disabled, at schemeshard: 72057594046678944 2026-01-07T17:44:33.894057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split and merge operations disabled" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:44:33.894456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split and merge operations disabled, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:44:33.894841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:44:33.894889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:44:33.895373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:44:33.895477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:44:33.895517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:999:2918] TestWaitNotification: OK eventTxId 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:30.582558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:30.582620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.582652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:30.582688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:30.582720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:30.582741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:30.582782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:30.582850Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:30.583580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:30.583808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:30.683379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:30.683451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:30.699583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:30.699919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:30.700139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:30.706925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:30.707283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:30.708074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:30.708341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:30.711006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.711207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:30.712675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:30.712741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:30.712899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:30.712982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:30.713090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:30.713283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:30.886320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.887955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:30.888567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:44:33.876149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:44:33.876166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:44:33.876284Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876440Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:44:33.876498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:33.876519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:44:33.876540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:44:33.876626Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876682Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.876836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-07T17:44:33.877007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:44:33.877310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.877398Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.877743Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.877799Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878070Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878199Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878242Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at 
schemeshard: 72057594046678944 2026-01-07T17:44:33.878350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878487Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878614Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.878978Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879198Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879258Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879397Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879439Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.879479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.884028Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:33.888477Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:33.888550Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:33.889222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:33.889271Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:33.889307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:33.891437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1104:2997] sender: [2:1164:2058] recipient: [2:15:2062] 2026-01-07T17:44:33.959867Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:33.960106Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 291us result status StatusSuccess 2026-01-07T17:44:33.960723Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 
OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:43.874762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:43.874849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:43.874885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:43.874921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:43.874978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:43.875026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:43.875111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:43.875200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:43:43.875880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:43.876160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:43.970709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:43.970767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:43.988248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:43.988661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:43.988837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:43.995709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:43.996137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:43:43.996880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:43.997189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:43.999653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:43.999824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:44.001031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:44.001097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:44.001228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:44.001284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:44.001322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:44.001504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:44.162893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.164999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.165066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.165128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:44.165178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2.906830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-07T17:44:32.906979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-07T17:44:32.907111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:32.918196Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:3861:5822], attempt# 0 2026-01-07T17:44:32.940403Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3861:5822], sender# [1:3860:5821] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:25765 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3A0CFF67-0A39-4C03-A7DD-66C09A0179B4 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-07T17:44:32.946619Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3861:5822], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:25765 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 33848BF7-CDFB-4CDA-8025-1D4F21E81A22 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-07T17:44:32.950904Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3861:5822], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-07T17:44:32.951701Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3860:5821] 2026-01-07T17:44:32.962863Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3861:5822], sender# [1:3860:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:44:32.966058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.966115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:25765 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F32187E9-BCE3-4DFF-8EE9-505409367DA1 amz-sdk-request: attempt=1 content-length: 740 content-md5: 
P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD 2026-01-07T17:44:32.966404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2026-01-07T17:44:32.966475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:44:32.967491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.967568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:44:32.968090Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3861:5822], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2026-01-07T17:44:32.968154Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3861:5822], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-07T17:44:32.968914Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3860:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:44:32.976189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:44:32.976318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:44:32.976360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:44:32.976402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:44:32.976443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:44:32.976538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:44:32.983909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:44:33.010339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:33.010410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:44:33.010597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:33.010706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:33.010782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:33.010838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.010902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:44:33.010944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:44:33.011146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:33.015052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.015417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:33.015471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:44:33.015577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:33.015611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:33.015668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:33.015703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:33.015738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:44:33.015810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2719] message: TxId: 102 2026-01-07T17:44:33.015857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:33.015892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:44:33.015922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:44:33.016062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:33.020251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:44:33.020315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3846:5808] TestWaitNotification: OK eventTxId 102 >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForRootDb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-07T17:44:19.085876Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676942126802725:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:19.093811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:19.577334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:19.577570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:19.583503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:19.719595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:19.725003Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:19.730994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676937831835210:2081] 1767807858986884 != 1767807858986887 2026-01-07T17:44:19.797888Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:19.797910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:19.797916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:19.798004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:19.909003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:20.022808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:20.025120Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:20.029787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:44:20.262399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:20.274067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:44:20.276656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:20.297147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:44:20.361147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.519227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.560119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.601297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.626535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.651243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.675380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.702899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.730735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.758990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:22.558208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955011706059:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.558220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955011706051:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.558345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.560895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955011706066:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.561049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.563203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:22.576138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676955011706065:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:22.646367Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676955011706118:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:23.071689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.104151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.136155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.166297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 
ition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-07T17:44:33.332746Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessageBatch] requestId [d49dc4ef-dfd779d6-1cb07830-22798847] reply ok 2026-01-07T17:44:33.333011Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:47514) <- (200 , 436 bytes) 2026-01-07T17:44:33.333147Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:47514) connection closed Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"SequenceNumber":"0","Id":"Id-2","MD5OfMessageBody":"7034dd2039d12b6dd94a9e6dfb820b77","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"SequenceNumber":"0","Id":"Id-3","MD5OfMessageBody":"f23251df60f088df56a4be0a5fb1ae75","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-07T17:44:33.334225Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:47524) incoming connection opened 2026-01-07T17:44:33.334286Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:47524) -> (POST /Root, 101 bytes) 2026-01-07T17:44:33.334451Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d821:cdb7:a47b:0:c021:cdb7:a47b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: b323d8a7-160112ec-f4dd87c-7abea6f4 2026-01-07T17:44:33.334797Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [b323d8a7-160112ec-f4dd87c-7abea6f4] got new request from [d821:cdb7:a47b:0:c021:cdb7:a47b:0] database '/Root' stream '' 2026-01-07T17:44:33.335273Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [b323d8a7-160112ec-f4dd87c-7abea6f4] [auth] Authorized successfully 2026-01-07T17:44:33.335329Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [b323d8a7-160112ec-f4dd87c-7abea6f4] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:33.336414Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767807878336 VisibilityDeadlineMilliseconds: 1767807903336 MaxNumberOfMessages: 10 2026-01-07T17:44:33.337928Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-07T17:44:33.337958Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-07T17:44:33.338050Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 3 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2026-01-07T17:44:33.338204Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 195 count 3 last offset 0, current partition end offset: 3 2026-01-07T17:44:33.338217Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-07T17:44:33.338260Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. 
Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 195 accessed 2 times before, last time 2026-01-07T17:44:33.000000Z 2026-01-07T17:44:33.338294Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-07T17:44:33.338320Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:33.338398Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2026-01-07T17:44:33.338486Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:44:33.339418Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SentTimestamp":"1767807873278","MessageGroupId":"MessageGroupId-1"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-1","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"MD5OfBody":"7034dd2039d12b6dd94a9e6dfb820b77","Attributes":{"SentTimestamp":"1767807873323"},"ReceiptHandle":"CAAQAQ==","Body":"MessageBody-2","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"MD5OfBody":"f23251df60f088df56a4be0a5fb1ae75","Attributes":{"SentTimestamp":"1767807873323"},"ReceiptHandle":"CAAQAg==","Body":"MessageBody-3","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-07T17:44:33.339796Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [b323d8a7-160112ec-f4dd87c-7abea6f4] reply ok 2026-01-07T17:44:33.340040Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:47524) <- (200 , 641 bytes) 2026-01-07T17:44:33.340155Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:47524) connection closed 2026-01-07T17:44:33.341335Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:47534) incoming connection opened 2026-01-07T17:44:33.341397Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:47534) -> (POST /Root, 397 bytes) 2026-01-07T17:44:33.341502Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d80c:cdb7:a47b:0:c00c:cdb7:a47b:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: d2e65047-36f1d61a-ee4511f-c33e36fd 2026-01-07T17:44:33.342024Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessageBatch] requestId [d2e65047-36f1d61a-ee4511f-c33e36fd] got new request from [d80c:cdb7:a47b:0:c00c:cdb7:a47b:0] database '/Root' stream '' 2026-01-07T17:44:33.342589Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessageBatch] requestId [d2e65047-36f1d61a-ee4511f-c33e36fd] [auth] Authorized successfully 2026-01-07T17:44:33.342640Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessageBatch] requestId [d2e65047-36f1d61a-ee4511f-c33e36fd] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:33.349790Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 2 Offset: 0 Offset: 1 2026-01-07T17:44:33.350854Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.350872Z node 2 :PERSQUEUE DEBUG: partition.cpp:2409: 
[72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:44:33.350897Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:44:33.350907Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.350922Z node 2 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:44:33.350964Z node 2 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 3 (startOffset 0) session 2026-01-07T17:44:33.350990Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:44:33.351001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:44:33.351014Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.351155Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:44:33.352451Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [d2e65047-36f1d61a-ee4511f-c33e36fd] reply ok 2026-01-07T17:44:33.352760Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:47534) <- (200 , 219 bytes) 2026-01-07T17:44:33.352887Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:47534) connection closed Http output full {"Successful":[{"Id":"delete-id-2"},{"Id":"delete-id-0"},{"Id":"delete-id-1"}],"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"delete-invalid","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2026-01-07T17:44:33.354441Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:44:33.354506Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:44:33.354550Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:44:33.354572Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.354584Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.354593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.354605Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.354614Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.354641Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data 
for blobs compaction 2026-01-07T17:44:33.429059Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.429097Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.429110Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.429126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.429136Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.531140Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.531171Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.531181Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.531205Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.531218Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:31.493503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:31.493587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.493631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:31.493685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:31.493741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:31.493770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:31.493823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.493901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:31.494748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:31.495066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:31.592043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:31.592111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:31.612038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:31.612313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:31.612477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:31.621461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:31.621862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:31.622508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:31.622741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:31.624981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:31.625175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:31.626400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:31.626473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:31.626607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:31.626667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:31.626758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:31.626896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:31.737545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.738999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:31.739491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ionPlan, step: 300, at tablet: 72057594046678944 2026-01-07T17:44:34.574610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 107:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-07T17:44:34.600785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-01-07T17:44:34.605020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-07T17:44:34.605175Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-07T17:44:34.605243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.605390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:44:34.605604Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 128 -> 240 2026-01-07T17:44:34.605818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:34.610888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.611864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:34.611916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:44:34.612224Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:34.612301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 107, path id: 39 2026-01-07T17:44:34.613125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.613184Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-07T17:44:34.613303Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:44:34.613345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:44:34.613391Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:44:34.613429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:44:34.613474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-07T17:44:34.613524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:44:34.613569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:44:34.613605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-07T17:44:34.613795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:44:34.613849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 1 2026-01-07T17:44:34.613887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-07T17:44:34.616498Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:44:34.616625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:44:34.616668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:44:34.616703Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:44:34.616736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:44:34.616819Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 1 2026-01-07T17:44:34.616849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:812:2778] 2026-01-07T17:44:34.620452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:44:34.620552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:44:34.620601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 
107: satisfy waiter [2:1089:3002] TestWaitNotification: OK eventTxId 107 2026-01-07T17:44:34.621370Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:34.621694Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 337us result status StatusSuccess 2026-01-07T17:44:34.622515Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:44:31.935133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:44:31.935257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.935303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:44:31.935355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:44:31.935421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:44:31.935472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:44:31.935535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:44:31.935629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:44:31.936581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:44:31.936898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:44:32.039853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:32.039909Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:32.058410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:44:32.058687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:44:32.058909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:44:32.066911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:44:32.067411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:44:32.068242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:32.068546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:44:32.072227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.072442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:32.073653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:32.073723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:32.073871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:32.073927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:32.074049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:32.074271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:44:32.217849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:44:32.218905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.219959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.220048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:44:32.220138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
lPathId: 27] was 0 2026-01-07T17:44:34.674155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:44:34.674178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:44:34.674202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:44:34.674225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:44:34.674250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:44:34.674290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:44:34.674317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:44:34.674342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:44:34.674368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:44:34.674396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:44:34.674573Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.674664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.674780Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:44:34.674830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:44:34.674862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:34.674885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:44:34.674909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:44:34.675036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.675116Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.675344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, 
read records: 14, at schemeshard: 72057594046678944 2026-01-07T17:44:34.675558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:44:34.675927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.676049Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.676442Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.676512Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.676849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677052Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677118Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677213Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677427Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677518Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.677987Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678460Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678646Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678703Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.678761Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.685915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:44:34.691104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:44:34.691199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:44:34.691995Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:44:34.692064Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:34.692118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:44:34.694433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1140:3043] sender: [2:1200:2058] recipient: [2:15:2062] 2026-01-07T17:44:34.775308Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:44:34.775695Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 419us result status StatusSuccess 2026-01-07T17:44:34.776380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 2076388947592074217 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-07T17:44:04.038582Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.041456Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.043877Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.049022Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.049203Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.063145Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.295556Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.541433Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.556576Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.583121Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.751312Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.927401Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.927925Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.946912Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:04.985932Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.006275Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.056103Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.079969Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.158454Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.466967Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.468703Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.480396Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.591803Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.601764Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.779156Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.906776Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.920799Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.935717Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:05.949410Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.057025Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.071939Z 1 
00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.073928Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.101421Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.112461Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.248427Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.259340Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.285195Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.297786Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.505502Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.506205Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.821817Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:06.836155Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.024139Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.050112Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.217928Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.359870Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.389768Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.390393Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.504946Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.518066Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.534657Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.545023Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.741535Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.797360Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.812365Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.923898Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.936494Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:07.950921Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.065430Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.078298Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.312905Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.326980Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.533738Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.579886Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.930288Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.951758Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:08.965388Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:09.230745Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:09.243482Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:09.392542Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:09.408429Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2026-01-07T17:44:09.673940Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1 ... 
k read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-07T17:44:22.425981Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.430402Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.436245Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.443206Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.444089Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.780177Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:22.793496Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.033831Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.046874Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.345830Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.363328Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.418494Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.420471Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.439708Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.787984Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.884142Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.910371Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:23.920720Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.056072Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.075951Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:755] 2026-01-07T17:44:24.077038Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.165615Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.373560Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.390795Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.404994Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.536366Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.552317Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.568807Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.587485Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.656176Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.656337Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.797793Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:24.808322Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.166588Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.180025Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.265104Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.282990Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.306911Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.308517Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.499907Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.519012Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.871316Z 8 00h20m58.912560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.891331Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.971719Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:25.994762Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.181061Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.183828Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.227189Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.289184Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.634880Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.688781Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.798914Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.896321Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.959605Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.962209Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.982033Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:26.997638Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.160918Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.176792Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.194588Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.240311Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.265625Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.317664Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:755] 2026-01-07T17:44:27.346032Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.365935Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.672509Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.686399Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.747696Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.765924Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.966893Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2026-01-07T17:44:27.968809Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2026-01-07T17:44:05.036816Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.036856Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.036896Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.037322Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.060844Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.061252Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.062116Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.062732Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.064066Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.064236Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.064288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:44:05.065206Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.065231Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.065254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.065642Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:44:05.066967Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.067153Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.067425Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.067916Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.068074Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.068182Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.068230Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:44:05.069366Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.069401Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.069437Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.069784Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.070475Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.070639Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.070874Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.071744Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.071946Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.072072Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.072117Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:44:05.073235Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.073304Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.073348Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.073673Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.074260Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.077546Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.077814Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.100214Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.103242Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.103508Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-07T17:44:05.103564Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:44:05.105330Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.105396Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.105429Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.105734Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.106419Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.106702Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.106947Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.107267Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.107371Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.107446Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.107476Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:44:05.107979Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.108000Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.108055Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.108372Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.109112Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.109258Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.109438Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.109723Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.109817Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.109931Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.109966Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:44:05.110795Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.110822Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.110844Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.111176Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:44:05.111893Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.111981Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.112127Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.112770Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.117680Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.118087Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.118146Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-07T17:44:05.119254Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.119277Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.119302Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:05.119634Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:44:05.120211Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:44:05.120336Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.120514Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:44:05.122082Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:05.122490Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:44:05.122607Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:44:05.122711Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-07T17:44:05.143546Z :ReadSession INFO: Random seed for debugging is 1767807845143505 2026-01-07T17:44:05.397921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676881872460995:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:05.398267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:05.434184Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676882719260330:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:05.434509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;p ... 
37892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.030069Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.030081Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.030097Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.030126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.130367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.130397Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.130408Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.130426Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.130438Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.230695Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.230724Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.230744Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.230766Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.230779Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.331588Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.331623Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.331634Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.331649Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.331658Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.433944Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.433975Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.433985Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.434002Z node 2 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.434012Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.534344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.534374Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.534394Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.534410Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.534420Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.624997Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_14750475019609652769_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-07T17:44:33.634748Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.634775Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.634784Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.634794Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.634801Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.723712Z :INFO: [/Root] [/Root] [de63fbc1-a3353142-bacea738-f8910475] Closing read session. Close timeout: 0.000000s 2026-01-07T17:44:33.723794Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-07T17:44:33.723855Z :INFO: [/Root] [/Root] [de63fbc1-a3353142-bacea738-f8910475] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16538 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:44:33.723958Z :NOTICE: [/Root] [/Root] [de63fbc1-a3353142-bacea738-f8910475] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:44:33.724023Z :DEBUG: [/Root] [/Root] [de63fbc1-a3353142-bacea738-f8910475] [dc1] Abort session to cluster 2026-01-07T17:44:33.724698Z :NOTICE: [/Root] [/Root] [de63fbc1-a3353142-bacea738-f8910475] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:44:33.735045Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.735077Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.735089Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.735104Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.735115Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.749184Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_14750475019609652769_v1 grpc read done: success# 0, data# { } 2026-01-07T17:44:33.760982Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_14750475019609652769_v1 grpc read failed 2026-01-07T17:44:33.761124Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_14750475019609652769_v1 grpc closed 2026-01-07T17:44:33.761172Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_14750475019609652769_v1 is DEAD 2026-01-07T17:44:33.762144Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592676933412070885:2470] disconnected. 2026-01-07T17:44:33.762451Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_14750475019609652769_v1 2026-01-07T17:44:33.762496Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7592676933412070888:2473] destroyed 2026-01-07T17:44:33.762563Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_14750475019609652769_v1 2026-01-07T17:44:33.762195Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592676933412070885:2470] disconnected; active server actors: 1 2026-01-07T17:44:33.762218Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7592676933412070885:2470] client user disconnected session shared/user_1_1_14750475019609652769_v1 2026-01-07T17:44:33.835402Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.835439Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.835450Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.835466Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.835475Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:33.935754Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:33.935793Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.935809Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:33.935839Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:33.935850Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:44:34.399118Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [1:7592677006426515789:2634] TxId: 281474976715692. Ctx: { TraceId: 01kecryx170sad90qba859dk2w, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y5ZDk0NDctMjEzNjZjZjctZWVhMWI2OGQtMzUyNDgxMGM=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 trace_id# 2026-01-07T17:44:34.399941Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7592677006426515793:2634], TxId: 281474976715692, task: 3. Ctx: { CheckpointId : . TraceId : 01kecryx170sad90qba859dk2w. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2Y5ZDk0NDctMjEzNjZjZjctZWVhMWI2OGQtMzUyNDgxMGM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7592677006426515789:2634], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TestSqsTopicHttpProxy::TestSendMessageBatch |90.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:43:45.337687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:43:45.337790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:45.337833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:43:45.337870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:43:45.337927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:43:45.337963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:43:45.338019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:43:45.338102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:43:45.338982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:43:45.339266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:43:45.433260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:45.433306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:45.451193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:43:45.451623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:43:45.451811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:43:45.459190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:43:45.459581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-07T17:43:45.460401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:43:45.460687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:43:45.463710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:45.463902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:43:45.465201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:43:45.465270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:43:45.465402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:43:45.465459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:43:45.465509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:43:45.465702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:43:45.617422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.621730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.621953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:43:45.622966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:26325 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5C92D343-F254-46E5-B062-037C6F3B365E amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2026-01-07T17:44:34.310427Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3867:5828], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2026-01-07T17:44:34.310667Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3866:5827] 2026-01-07T17:44:34.310742Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3867:5828], sender# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:26325 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AA66C48A-E12A-4C0F-A159-EB9C0DD68897 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2026-01-07T17:44:34.314482Z node 1 :DATASHARD_BACKUP DEBUG: 
export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3867:5828], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-07T17:44:34.314546Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3867:5828], success# 1, error# , multipart# 1, uploadId# 1 2026-01-07T17:44:34.322575Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3867:5828], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e
4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:26325 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0C5199AF-B682-4B4A-BD56-57B6AC1995D7 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2026-01-07T17:44:34.334800Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3867:5828], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2026-01-07T17:44:34.335290Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-07T17:44:34.351917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:34.352007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:44:34.352206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:34.352326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 717 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-07T17:44:34.352412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:44:34.352463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.352522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:44:34.352592Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:44:34.352791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:44:34.357585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.358372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:44:34.358453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:44:34.358582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:34.358630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:34.358681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:44:34.358720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:34.358776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:44:34.358859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:746:2723] message: TxId: 102 2026-01-07T17:44:34.358918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:44:34.358962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:44:34.359021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:44:34.359161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:44:34.364701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:44:34.364789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3850:5812] TestWaitNotification: OK eventTxId 102 >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> 
NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] Test command err: 2026-01-07T17:44:33.323005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677000196549334:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:33.323608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:33.542364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:33.542458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:33.579338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:33.599256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:33.605914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:33.607279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677000196549230:2081] 1767807873312136 != 1767807873312139 2026-01-07T17:44:33.655935Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.developerApi.update ydb.tables.write ydb.databases.create ydb.databases.connect ydb.developerApi.get ydb.tables.select) 2026-01-07T17:44:33.656033Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cd0e9ef07d0] Connect to grpc://localhost:12864 2026-01-07T17:44:33.660665Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd0e9ef07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-07T17:44:33.677903Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cd0e9ef07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:44:33.678298Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:44:33.678594Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS 
DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue |90.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 13598917598378790817 Reassign# 4 -- VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 4 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:97:0] Put# [1:1:2:0:0:6:0] Put# [1:1:3:0:0:77:0] 2026-01-07T17:41:32.841461Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-07T17:41:32.844030Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13552979253169462455] 2026-01-07T17:41:32.856855Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:2:0:0:6:1] 2026-01-07T17:41:32.856967Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:77:2] 2026-01-07T17:41:32.857263Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 2 PartsResurrected# 2 Put# [1:1:4:0:0:2:0] Put# [1:1:5:0:0:64:0] Put# [1:1:6:0:0:57:0] Put# [1:1:7:0:0:31:0] Put# [1:1:8:0:0:28:0] Put# [1:1:9:0:0:78:0] Put# [1:1:10:0:0:28:0] Put# [1:1:11:0:0:94:0] Put# [1:1:12:0:0:81:0] Put# [1:1:13:0:0:21:0] Put# [1:1:14:0:0:27:0] Put# [1:1:15:0:0:91:0] Put# [1:1:16:0:0:96:0] Put# [1:1:17:0:0:58:0] Put# [1:1:18:0:0:95:0] Put# [1:1:19:0:0:95:0] Put# [1:1:20:0:0:88:0] Put# [1:1:21:0:0:81:0] Put# [1:1:22:0:0:62:0] Put# [1:1:23:0:0:7:0] Put# [1:1:24:0:0:28:0] Put# [1:1:25:0:0:45:0] Put# [1:1:26:0:0:25:0] Put# [1:1:27:0:0:66:0] Put# [1:1:28:0:0:29:0] Put# 
[1:1:29:0:0:73:0] Put# [1:1:30:0:0:34:0] Put# [1:1:31:0:0:10:0] Put# [1:1:32:0:0:15:0] Put# [1:1:33:0:0:89:0] Put# [1:1:34:0:0:77:0] Put# [1:1:35:0:0:91:0] Put# [1:1:36:0:0:83:0] Put# [1:1:37:0:0:38:0] Put# [1:1:38:0:0:99:0] Put# [1:1:39:0:0:54:0] Put# [1:1:40:0:0:23:0] Put# [1:1:41:0:0:12:0] Put# [1:1:42:0:0:95:0] Put# [1:1:43:0:0:39:0] Put# [1:1:44:0:0:2:0] Put# [1:1:45:0:0:27:0] Put# [1:1:46:0:0:42:0] Put# [1:1:47:0:0:92:0] Put# [1:1:48:0:0:61:0] Put# [1:1:49:0:0:67:0] Put# [1:1:50:0:0:14:0] Put# [1:1:51:0:0:60:0] Put# [1:1:52:0:0:63:0] Put# [1:1:53:0:0:7:0] Put# [1:1:54:0:0:47:0] Put# [1:1:55:0:0:36:0] Put# [1:1:56:0:0:99:0] Put# [1:1:57:0:0:82:0] Put# [1:1:58:0:0:61:0] Put# [1:1:59:0:0:24:0] Put# [1:1:60:0:0:80:0] Put# [1:1:61:0:0:38:0] Put# [1:1:62:0:0:24:0] Put# [1:1:63:0:0:53:0] Put# [1:1:64:0:0:79:0] Put# [1:1:65:0:0:61:0] Put# [1:1:66:0:0:60:0] Put# [1:1:67:0:0:60:0] Put# [1:1:68:0:0:68:0] Put# [1:1:69:0:0:31:0] Put# [1:1:70:0:0:56:0] Put# [1:1:71:0:0:74:0] Put# [1:1:72:0:0:48:0] Put# [1:1:73:0:0:64:0] Put# [1:1:74:0:0:61:0] Put# [1:1:75:0:0:21:0] Put# [1:1:76:0:0:39:0] Put# [1:1:77:0:0:76:0] Put# [1:1:78:0:0:89:0] Put# [1:1:79:0:0:70:0] Put# [1:1:80:0:0:14:0] Put# [1:1:81:0:0:29:0] Put# [1:1:82:0:0:100:0] Put# [1:1:83:0:0:7:0] Put# [1:1:84:0:0:4:0] Put# [1:1:85:0:0:83:0] Put# [1:1:86:0:0:31:0] Put# [1:1:87:0:0:53:0] Put# [1:1:88:0:0:74:0] Put# [1:1:89:0:0:25:0] Put# [1:1:90:0:0:9:0] Put# [1:1:91:0:0:23:0] Put# [1:1:92:0:0:86:0] Put# [1:1:93:0:0:59:0] Put# [1:1:94:0:0:75:0] Put# [1:1:95:0:0:39:0] Put# [1:1:96:0:0:34:0] Put# [1:1:97:0:0:81:0] Put# [1:1:98:0:0:12:0] Put# [1:1:99:0:0:81:0] Put# [1:1:100:0:0:90:0] Put# [1:1:101:0:0:90:0] Put# [1:1:102:0:0:13:0] Put# [1:1:103:0:0:26:0] Put# [1:1:104:0:0:96:0] Put# [1:1:105:0:0:85:0] Put# [1:1:106:0:0:44:0] Put# [1:1:107:0:0:72:0] Put# [1:1:108:0:0:43:0] Put# [1:1:109:0:0:38:0] Put# [1:1:110:0:0:38:0] Put# [1:1:111:0:0:33:0] Put# [1:1:112:0:0:34:0] Put# [1:1:113:0:0:29:0] Put# [1:1:114:0:0:82:0] Put# [1:1:115:0:0:14:0] Put# [1:1:116:0:0:64:0] Put# [1:1:117:0:0:60:0] Put# [1:1:118:0:0:30:0] Put# [1:1:119:0:0:44:0] Put# [1:1:120:0:0:28:0] Put# [1:1:121:0:0:49:0] Put# [1:1:122:0:0:98:0] Put# [1:1:123:0:0:17:0] Put# [1:1:124:0:0:25:0] Put# [1:1:125:0:0:21:0] Put# [1:1:126:0:0:51:0] Put# [1:1:127:0:0:87:0] Put# [1:1:128:0:0:49:0] Put# [1:1:129:0:0:49:0] Put# [1:1:130:0:0:17:0] Put# [1:1:131:0:0:73:0] Put# [1:1:132:0:0:99:0] Put# [1:1:133:0:0:33:0] Put# [1:1:134:0:0:86:0] Put# [1:1:135:0:0:57:0] Put# [1:1:136:0:0:93:0] Put# [1:1:137:0:0:75:0] Put# [1:1:138:0:0:32:0] Put# [1:1:139:0:0:74:0] Put# [1:1:140:0:0:65:0] Put# [1:1:141:0:0:51:0] Put# [1:1:142:0:0:24:0] Put# [1:1:143:0:0:92:0] Put# [1:1:144:0:0:33:0] Put# [1:1:145:0:0:81:0] Put# [1:1:146:0:0:87:0] Put# [1:1:147:0:0:96:0] Put# [1:1:148:0:0:35:0] Put# [1:1:149:0:0:55:0] Put# [1:1:150:0:0:34:0] Put# [1:1:151:0:0:24:0] Put# [1:1:152:0:0:96:0] Put# [1:1:153:0:0:20:0] Put# [1:1:154:0:0:65:0] Put# [1:1:155:0:0:57:0] Put# [1:1:156:0:0:15:0] Put# [1:1:157:0:0:82:0] Put# [1:1:158:0:0:57:0] Put# [1:1:159:0:0:30:0] Put# [1:1:160:0:0:69:0] Put# [1:1:161:0:0:19:0] Put# [1:1:162:0:0:92:0] Put# [1:1:163:0:0:70:0] Put# [1:1:164:0:0:69:0] Put# [1:1:165:0:0:73:0] Put# [1:1:166:0:0:80:0] Put# [1:1:167:0:0:33:0] Put# [1:1:168:0:0:56:0] Put# [1:1:169:0:0:75:0] Put# [1:1:170:0:0:100:0] Put# [1:1:171:0:0:65:0] Put# [1:1:172:0:0:88:0] Put# [1:1:173:0:0:72:0] Put# [1:1:174:0:0:81:0] Put# [1:1:175:0:0:34:0] Put# [1:1:176:0:0:91:0] Put# [1:1:177:0:0:80:0] Put# [1:1:178:0:0:82:0] Put# [1:1:179:0:0:11:0] Put# 
[1:1:180:0:0:12:0] Put# [1:1:181:0:0:62:0] Put# [1:1:182:0:0:94:0] Put# [1:1:183:0:0:18:0] Put# [1:1:184:0:0:95:0] Put# [1:1:185:0:0:13:0] Put# [1:1:186:0:0:2:0] Put# [1:1:187:0:0:54:0] Put# [1:1:188:0:0:22:0] Put# [1:1:189:0:0:73:0] Put# [1:1:190:0:0:72:0] Put# [1:1:191:0:0:90:0] Put# [1:1:192:0:0:45:0] Put# [1:1:193:0:0:31:0] Put# [1:1:194:0:0:92:0] Put# [1:1:195:0:0:57:0] Put# [1:1:196:0:0:85:0] Put# [1:1:197:0:0:72:0] Put# [1:1:198:0:0:54:0] Put# [1:1:199:0:0:36:0] Put# [1:1:200:0:0:71:0] Put# [1:1:201:0:0:99:0] Put# [1:1:202:0:0:98:0] Put# [1:1:203:0:0:35:0] Put# [1:1:204:0:0:46:0] Put# [1:1:205:0:0:40:0] Put# [1:1:206:0:0:76:0] Put# [1:1:207:0:0:81:0] Put# [1:1:208:0:0:82:0] Put# [1:1:209:0:0:70:0] Put# [1:1:210:0:0:99:0] Put# [1:1:211:0:0:94:0] Put# [1:1:212:0:0:54:0] Put# [1:1:213:0:0:91:0] Put# [1:1:214:0:0:56:0] Put# [1:1:215:0:0:90:0] Put# [1:1:216:0:0:37:0] Put# [1:1:217:0:0:93:0] Put# [1:1:218:0:0:9:0] Put# [1:1:219:0:0:88:0] Put# [1:1:220:0:0:52:0] Put# [1:1:221:0:0:14:0] Put# [1:1:222:0:0:46:0] Put# [1:1:223:0:0:78:0] Put# [1:1:224:0:0:27:0] Put# [1:1:225:0:0:44:0] Put# [1:1:226:0:0:62:0] Put# [1:1:227:0:0:37:0] Put# [1:1:228:0:0:18:0] Put# [1:1:229:0:0:82:0] Put# [1:1:230:0:0:92:0] Put# [1:1:231:0:0:89:0] Put# [1:1:232:0:0:94:0] Put# [1:1:233:0:0:89:0] Put# [1:1:234:0:0:91:0] Put# [1:1:235:0:0:41:0] Put# [1:1:236:0:0:52:0] Put# [1:1:237:0:0:29:0] Put# [1:1:238:0:0:80:0] Put# [1:1:239:0:0:74:0] Put# [1:1:240:0:0:88:0] Put# [1:1:241:0:0:6:0] Put# [1:1:242:0:0:58:0] Put# [1:1:243:0:0:84:0] Put# [1:1:244:0:0:25:0] Put# [1:1:245:0:0:41:0] Put# [1:1:246:0:0:1:0] Put# [1:1:247:0:0:91:0] Put# [1:1:248:0:0:70:0] Put# [1:1:249:0:0:33:0] Put# [1:1:250:0:0:61:0] Put# [1:1:251:0:0:18:0] Put# [1:1:252:0:0:83:0] Put# [1:1:253:0:0:89:0] Put# [1:1:254:0:0:93:0] Put# [1:1:255:0:0:36:0] Put# [1:1:256:0:0:22:0] Put# [1:1:257:0:0:41:0] Put# [1:1:258:0:0:37:0] Put# [1:1:259:0:0:39:0] Put# [1:1:260:0:0:32:0] Put# [1:1:261:0:0:29:0] Put# [1:1:262:0:0:91:0] Put# [1:1:263:0:0:28:0] Put# [1:1:264:0:0:18:0] Put# [1:1:265:0:0:26:0] Put# [1:1:266:0:0:63:0] Put# [1:1:267:0:0:47:0] Put# [1:1:268:0:0:71:0] Put# [1:1:269:0:0:52:0] Put# [1:1:270:0:0:5:0] Put# [1:1:271:0:0:8:0] Put# [1:1:272:0:0:6:0] Put# [1:1:273:0:0:95:0] Put# [1:1:274:0:0:9:0] Put# [1:1:275:0:0:76:0] Put# [1:1:276:0:0:15:0] Put# [1:1:277:0:0:66:0] Put# [1:1:278:0:0:78:0] Put# [1:1:279:0:0:71:0] Put# [1:1:280:0:0:82:0] Put# [1:1:281:0:0:85:0] Put# [1:1:282:0:0:34:0] Put# [1:1:283:0:0:43:0] Put# [1:1:284:0:0:80:0] Put# [1:1:285:0:0:44:0] Put# [1:1:286:0:0:14:0] Put# [1:1:287:0:0:89:0] Put# [1:1:288:0:0:79:0] Put# [1:1:289:0:0:44:0] Put# [1:1:290:0:0:53:0] Put# [1:1:291:0:0:18:0] Put# [1:1:292:0:0:23:0] Put# [1:1:293:0:0:17:0] Put# [1:1:294:0:0:20:0] Put# [1:1:295:0:0:64:0] Put# [1:1:296:0:0:62:0] Put# [1:1:297:0:0:69:0] Put# [1:1:298:0:0:52:0] Put# [1:1:299:0:0:20:0] Put# [1:1:300:0:0:96:0] Put# [1:1:301:0:0:3:0] Put# [1:1:302:0:0:96:0] Put# [1:1:303:0:0:3:0] Put# [1:1:304:0:0:39:0] Put# [1:1:305:0:0:80:0] Put# [1:1:306:0:0:22:0] Put# [1:1:307:0:0:67:0] Put# [1:1:308:0:0:8:0] Put# [1:1:309:0:0:64:0] Put# [1:1:310:0:0:48:0] Put# [1:1:311:0:0:16:0] Put# [1:1:312:0:0:20:0] Put# [1:1:313:0:0:54:0] Put# [1:1:314:0:0:29:0] Put# [1:1:315:0:0:100:0] Put# [1:1:316:0:0:44:0] Put# [1:1:317:0:0:53:0] Put# [1:1:318:0:0:43:0] Put# [1:1:319:0:0:14:0] Put# [1:1:320:0:0:37:0] Put# [1:1:321:0:0:70:0] Put# [1:1:322:0:0:93:0] Put# [1:1:323:0:0:34:0] Put# [1:1:324:0:0:32:0] Put# [1:1:325:0:0:71:0] Put# [1:1:326:0:0:67:0] Put# [1:1:327:0:0:40:0] Put# 
[1:1:328:0:0:25:0] Put# [1:1:329:0:0:74:0] Put# [1:1:330:0:0:32:0] Put# [1:1:331:0:0:10:0] Put# [1:1:332:0:0:65:0] Put# [1:1:333:0:0:73:0] Put# [1:1:334:0:0:94:0] Put# [1:1:335:0:0:69:0] Put# [1:1:336:0:0:18:0] Put# [1:1:337:0:0:62:0] Put# [1:1:338:0:0:68:0] Put# [1:1:339:0:0:27:0] Put# [1:1:340:0:0:60:0] Put# [1:1:341:0:0:78:0] Put# [1:1:342:0:0:2:0] Put# [1:1:343:0:0:46:0] Put# [1:1:344:0:0:44:0] Put# [1:1:345:0:0:35:0] Put# [1:1:346:0:0:46:0] Put# [1:1:347:0:0:94:0] Put# [1:1:348:0:0:71:0] Put# [1:1:349:0:0:24:0] Put# [1:1:350:0:0:11:0] Put# [1:1:351:0:0:70:0] Put# [1:1:352:0:0:14:0] Put# [1:1:353:0:0:19:0] Put# [1:1:354:0:0:71:0] Put# [1:1:355:0:0:68:0] Put# [1:1:356:0:0:11:0] Put# [1:1:357:0:0:87:0] Put# [1:1:358:0:0:3:0] Put# [1:1:359:0:0:10:0] Put# [1:1:360:0:0:55:0] Put# [1:1:361:0:0:100:0] Put# [1:1:362:0:0:48:0] Put# [1:1:363:0:0:7:0] Put# [1:1:364:0:0:8:0] Put# [1:1:365:0:0:15:0] Put# [1:1:366:0:0:33:0] Put# [1:1:367:0:0:18:0] Put# [1:1:368:0:0:92:0] Put# [1:1:369:0:0:39:0] Put# [1:1:370:0:0:15:0] Put# [1:1:371:0:0:10:0] Put# [1:1:372:0:0:70:0] Put# [1:1:373:0:0:55:0] Put# [1:1:374:0:0:74:0] Put# [1:1:375:0:0:15:0] Put# [1:1:376:0:0:29:0] Put# [1:1:377:0:0:1:0] Put# [1:1:378:0:0:51:0] Put# [1:1:379:0:0:40:0] Put# [1:1:380:0:0:28:0] Put# [1:1:381:0:0:36:0] Put# [1:1:382:0:0:44:0] Put# [1:1:383:0:0:3:0] Put# [1:1:384:0:0:100:0] Put# [1:1:385:0:0:33:0] Put# [1:1:386:0:0:74:0] Put# [1:1:387:0:0:81:0] Put# [1:1:388:0:0:95:0] Put# [1:1:389:0:0:48:0] Put# [1:1:390:0:0:39:0] Put# [1:1:391:0:0:46:0] Put# [1:1:392:0:0:4:0] Put# [1:1:393:0:0:63:0] Put# [1:1:394:0:0:94:0] Put# [1:1:395:0:0:10:0] Put# [1:1:396:0:0:12:0] Put# [1:1:397:0:0:87:0] Put# [1:1:398:0:0:45:0] Put# [1:1:399:0:0:81:0] Put# [1:1:400:0:0:71:0] Put# [1:1:401:0:0:13:0] Put# [1:1:402:0:0:51:0] Put# [1:1:403:0:0:5:0] Put# [1:1:404:0:0:82:0] Put# [1:1:405:0:0:44:0] Put# [1:1:406:0:0:30:0] Put# [1:1:407:0:0:26:0] Put# [1:1:408:0:0:51:0] Put# [1:1:409:0:0:16:0] Put# [1:1:410:0:0:72:0] Put# [1:1:411:0:0:41:0] Put# [1:1:412:0:0:11:0] Put# [1:1:413:0:0:44:0] Put# [1:1:414:0:0:41:0] Put# [1:1:415:0:0:85:0] Put# [1:1:416:0:0:23:0] Put# [1:1:417:0:0:9:0] Put# [1:1:418:0:0:50:0] Put# [1:1:419:0:0:53:0] Put# [1:1:420:0:0:80:0] Put# [1:1:421:0:0:7:0] Put# [1:1:422:0:0:9:0] Put# [1:1:423:0:0:89:0] Put# [1:1:424:0:0:49:0] Put# [1:1:425:0:0:81:0] Put# [1:1:426:0:0:7:0] Put# [1:1:427:0:0:58:0] Put# [1:1:428:0:0:8:0] Put# [1:1:429:0:0:78:0] Put# [1:1:430:0:0:53:0] Put# [1:1:431:0:0:2:0] Put# [1:1:432:0:0:33:0] Put# [1:1:433:0:0:1:0] Put# [1:1:434:0:0:86:0] Put# [1:1:435:0:0:27:0] Put# [1:1:436:0:0:98:0] Put# [1:1:437:0:0:77:0] Put# [1:1:438:0:0:80:0] Put# [1:1:439:0:0:28:0] Put# [1:1:440:0:0:65:0] Put# [1:1:441:0:0:94:0] Put# [1:1:442:0:0:47:0] Put# [1:1:443:0:0:57:0] Put# [1:1:444:0:0:44:0] Put# [1:1:445:0:0:82:0] Put# [1:1:446:0:0:23:0] Put# [1:1:447:0:0:38:0] Put# [1:1:448:0:0:36:0] Put# [1:1:449:0:0:57:0] Put# [1:1:450:0:0:76:0] Put# [1:1:451:0:0:9:0] Put# [1:1:452:0:0:56:0] Put# [ ... 
0:69:0] Put# [1:4:9520:0:0:69:0] Put# [1:4:9521:0:0:29:0] Put# [1:4:9522:0:0:78:0] Put# [1:4:9523:0:0:18:0] Put# [1:4:9524:0:0:63:0] Put# [1:4:9525:0:0:21:0] Put# [1:4:9526:0:0:92:0] Put# [1:4:9527:0:0:89:0] Put# [1:4:9528:0:0:34:0] Put# [1:4:9529:0:0:87:0] Put# [1:4:9530:0:0:52:0] Put# [1:4:9531:0:0:97:0] Put# [1:4:9532:0:0:92:0] Put# [1:4:9533:0:0:15:0] Put# [1:4:9534:0:0:44:0] Put# [1:4:9535:0:0:69:0] Put# [1:4:9536:0:0:62:0] Put# [1:4:9537:0:0:72:0] Put# [1:4:9538:0:0:65:0] Put# [1:4:9539:0:0:98:0] Put# [1:4:9540:0:0:55:0] Put# [1:4:9541:0:0:32:0] Put# [1:4:9542:0:0:54:0] Put# [1:4:9543:0:0:65:0] Put# [1:4:9544:0:0:88:0] Put# [1:4:9545:0:0:28:0] Put# [1:4:9546:0:0:42:0] Put# [1:4:9547:0:0:47:0] Put# [1:4:9548:0:0:84:0] Put# [1:4:9549:0:0:69:0] Put# [1:4:9550:0:0:27:0] Put# [1:4:9551:0:0:33:0] Put# [1:4:9552:0:0:7:0] Put# [1:4:9553:0:0:60:0] Put# [1:4:9554:0:0:76:0] Put# [1:4:9555:0:0:62:0] Put# [1:4:9556:0:0:12:0] Put# [1:4:9557:0:0:40:0] Put# [1:4:9558:0:0:20:0] Put# [1:4:9559:0:0:57:0] Put# [1:4:9560:0:0:40:0] Put# [1:4:9561:0:0:94:0] Put# [1:4:9562:0:0:68:0] Put# [1:4:9563:0:0:25:0] Put# [1:4:9564:0:0:55:0] Put# [1:4:9565:0:0:54:0] Put# [1:4:9566:0:0:61:0] Put# [1:4:9567:0:0:10:0] Put# [1:4:9568:0:0:40:0] Put# [1:4:9569:0:0:26:0] Put# [1:4:9570:0:0:85:0] Put# [1:4:9571:0:0:75:0] Put# [1:4:9572:0:0:98:0] Put# [1:4:9573:0:0:34:0] Put# [1:4:9574:0:0:85:0] Put# [1:4:9575:0:0:89:0] Put# [1:4:9576:0:0:23:0] Put# [1:4:9577:0:0:16:0] Put# [1:4:9578:0:0:48:0] Put# [1:4:9579:0:0:87:0] Put# [1:4:9580:0:0:24:0] Put# [1:4:9581:0:0:18:0] Put# [1:4:9582:0:0:80:0] Put# [1:4:9583:0:0:1:0] Put# [1:4:9584:0:0:68:0] Put# [1:4:9585:0:0:15:0] Put# [1:4:9586:0:0:43:0] Put# [1:4:9587:0:0:74:0] Put# [1:4:9588:0:0:25:0] Put# [1:4:9589:0:0:20:0] Put# [1:4:9590:0:0:9:0] Put# [1:4:9591:0:0:94:0] Put# [1:4:9592:0:0:18:0] Put# [1:4:9593:0:0:25:0] Put# [1:4:9594:0:0:33:0] Put# [1:4:9595:0:0:98:0] Put# [1:4:9596:0:0:26:0] Put# [1:4:9597:0:0:42:0] Put# [1:4:9598:0:0:16:0] Put# [1:4:9599:0:0:23:0] Put# [1:4:9600:0:0:32:0] Put# [1:4:9601:0:0:84:0] Put# [1:4:9602:0:0:59:0] Put# [1:4:9603:0:0:70:0] Put# [1:4:9604:0:0:88:0] Put# [1:4:9605:0:0:71:0] Put# [1:4:9606:0:0:35:0] Put# [1:4:9607:0:0:82:0] Put# [1:4:9608:0:0:26:0] Put# [1:4:9609:0:0:56:0] Put# [1:4:9610:0:0:84:0] Put# [1:4:9611:0:0:15:0] Put# [1:4:9612:0:0:63:0] Put# [1:4:9613:0:0:6:0] Put# [1:4:9614:0:0:79:0] Put# [1:4:9615:0:0:88:0] Put# [1:4:9616:0:0:16:0] Put# [1:4:9617:0:0:50:0] Put# [1:4:9618:0:0:55:0] Put# [1:4:9619:0:0:29:0] Put# [1:4:9620:0:0:67:0] Put# [1:4:9621:0:0:23:0] Put# [1:4:9622:0:0:63:0] Put# [1:4:9623:0:0:76:0] Put# [1:4:9624:0:0:44:0] Put# [1:4:9625:0:0:72:0] Put# [1:4:9626:0:0:59:0] Put# [1:4:9627:0:0:10:0] Put# [1:4:9628:0:0:79:0] Put# [1:4:9629:0:0:16:0] Put# [1:4:9630:0:0:70:0] Put# [1:4:9631:0:0:71:0] Put# [1:4:9632:0:0:94:0] Put# [1:4:9633:0:0:40:0] Put# [1:4:9634:0:0:22:0] Put# [1:4:9635:0:0:1:0] Put# [1:4:9636:0:0:11:0] Put# [1:4:9637:0:0:30:0] Put# [1:4:9638:0:0:77:0] Put# [1:4:9639:0:0:9:0] Put# [1:4:9640:0:0:93:0] Put# [1:4:9641:0:0:85:0] Put# [1:4:9642:0:0:50:0] Put# [1:4:9643:0:0:86:0] Put# [1:4:9644:0:0:5:0] Put# [1:4:9645:0:0:20:0] Put# [1:4:9646:0:0:97:0] Put# [1:4:9647:0:0:60:0] Put# [1:4:9648:0:0:74:0] Put# [1:4:9649:0:0:54:0] Put# [1:4:9650:0:0:72:0] Put# [1:4:9651:0:0:84:0] Put# [1:4:9652:0:0:41:0] Put# [1:4:9653:0:0:7:0] Put# [1:4:9654:0:0:74:0] Put# [1:4:9655:0:0:21:0] Put# [1:4:9656:0:0:38:0] Put# [1:4:9657:0:0:2:0] Put# [1:4:9658:0:0:69:0] Put# [1:4:9659:0:0:19:0] Put# [1:4:9660:0:0:66:0] Put# [1:4:9661:0:0:39:0] Put# 
[1:4:9662:0:0:92:0] Put# [1:4:9663:0:0:46:0] Put# [1:4:9664:0:0:28:0] Put# [1:4:9665:0:0:88:0] Put# [1:4:9666:0:0:53:0] Put# [1:4:9667:0:0:67:0] Put# [1:4:9668:0:0:8:0] Put# [1:4:9669:0:0:33:0] Put# [1:4:9670:0:0:19:0] Put# [1:4:9671:0:0:99:0] Put# [1:4:9672:0:0:23:0] Put# [1:4:9673:0:0:74:0] Put# [1:4:9674:0:0:79:0] Put# [1:4:9675:0:0:47:0] Put# [1:4:9676:0:0:10:0] Put# [1:4:9677:0:0:41:0] Put# [1:4:9678:0:0:21:0] Put# [1:4:9679:0:0:89:0] Put# [1:4:9680:0:0:84:0] Put# [1:4:9681:0:0:32:0] Put# [1:4:9682:0:0:23:0] Put# [1:4:9683:0:0:12:0] Put# [1:4:9684:0:0:2:0] Put# [1:4:9685:0:0:19:0] Put# [1:4:9686:0:0:33:0] Put# [1:4:9687:0:0:48:0] Put# [1:4:9688:0:0:90:0] Put# [1:4:9689:0:0:7:0] Put# [1:4:9690:0:0:53:0] Put# [1:4:9691:0:0:80:0] Put# [1:4:9692:0:0:6:0] Put# [1:4:9693:0:0:60:0] Put# [1:4:9694:0:0:70:0] Put# [1:4:9695:0:0:15:0] Put# [1:4:9696:0:0:14:0] Put# [1:4:9697:0:0:16:0] Put# [1:4:9698:0:0:25:0] Put# [1:4:9699:0:0:14:0] Put# [1:4:9700:0:0:33:0] Put# [1:4:9701:0:0:51:0] Put# [1:4:9702:0:0:67:0] Put# [1:4:9703:0:0:87:0] Put# [1:4:9704:0:0:72:0] Put# [1:4:9705:0:0:70:0] Put# [1:4:9706:0:0:47:0] Put# [1:4:9707:0:0:72:0] Put# [1:4:9708:0:0:95:0] Put# [1:4:9709:0:0:93:0] Put# [1:4:9710:0:0:76:0] Put# [1:4:9711:0:0:47:0] Put# [1:4:9712:0:0:65:0] Put# [1:4:9713:0:0:43:0] Put# [1:4:9714:0:0:19:0] Put# [1:4:9715:0:0:81:0] Put# [1:4:9716:0:0:45:0] Put# [1:4:9717:0:0:51:0] Put# [1:4:9718:0:0:74:0] Put# [1:4:9719:0:0:3:0] Put# [1:4:9720:0:0:30:0] Put# [1:4:9721:0:0:82:0] Put# [1:4:9722:0:0:93:0] Put# [1:4:9723:0:0:63:0] Put# [1:4:9724:0:0:57:0] Put# [1:4:9725:0:0:66:0] Put# [1:4:9726:0:0:41:0] Put# [1:4:9727:0:0:48:0] Put# [1:4:9728:0:0:9:0] Put# [1:4:9729:0:0:74:0] Put# [1:4:9730:0:0:28:0] Put# [1:4:9731:0:0:78:0] Put# [1:4:9732:0:0:50:0] Put# [1:4:9733:0:0:66:0] Put# [1:4:9734:0:0:66:0] Put# [1:4:9735:0:0:44:0] Put# [1:4:9736:0:0:50:0] Put# [1:4:9737:0:0:97:0] Put# [1:4:9738:0:0:20:0] Put# [1:4:9739:0:0:23:0] Put# [1:4:9740:0:0:73:0] Put# [1:4:9741:0:0:33:0] Put# [1:4:9742:0:0:70:0] Put# [1:4:9743:0:0:74:0] Put# [1:4:9744:0:0:28:0] Put# [1:4:9745:0:0:4:0] Put# [1:4:9746:0:0:67:0] Put# [1:4:9747:0:0:19:0] Put# [1:4:9748:0:0:11:0] Put# [1:4:9749:0:0:84:0] Put# [1:4:9750:0:0:75:0] Put# [1:4:9751:0:0:28:0] Put# [1:4:9752:0:0:2:0] Put# [1:4:9753:0:0:78:0] Put# [1:4:9754:0:0:89:0] Put# [1:4:9755:0:0:25:0] Put# [1:4:9756:0:0:71:0] Put# [1:4:9757:0:0:32:0] Put# [1:4:9758:0:0:73:0] Put# [1:4:9759:0:0:77:0] Put# [1:4:9760:0:0:80:0] Put# [1:4:9761:0:0:4:0] Put# [1:4:9762:0:0:10:0] Put# [1:4:9763:0:0:25:0] Put# [1:4:9764:0:0:40:0] Put# [1:4:9765:0:0:22:0] Put# [1:4:9766:0:0:42:0] Put# [1:4:9767:0:0:92:0] Put# [1:4:9768:0:0:20:0] Put# [1:4:9769:0:0:51:0] Put# [1:4:9770:0:0:12:0] Put# [1:4:9771:0:0:37:0] Put# [1:4:9772:0:0:47:0] Put# [1:4:9773:0:0:63:0] Put# [1:4:9774:0:0:20:0] Put# [1:4:9775:0:0:22:0] Put# [1:4:9776:0:0:23:0] Put# [1:4:9777:0:0:46:0] Put# [1:4:9778:0:0:36:0] Put# [1:4:9779:0:0:99:0] Put# [1:4:9780:0:0:96:0] Put# [1:4:9781:0:0:98:0] Put# [1:4:9782:0:0:43:0] Put# [1:4:9783:0:0:63:0] Put# [1:4:9784:0:0:83:0] Put# [1:4:9785:0:0:56:0] Put# [1:4:9786:0:0:15:0] Put# [1:4:9787:0:0:55:0] Put# [1:4:9788:0:0:49:0] Put# [1:4:9789:0:0:73:0] Put# [1:4:9790:0:0:77:0] Put# [1:4:9791:0:0:38:0] Put# [1:4:9792:0:0:41:0] Put# [1:4:9793:0:0:16:0] Put# [1:4:9794:0:0:79:0] Put# [1:4:9795:0:0:66:0] Put# [1:4:9796:0:0:43:0] Put# [1:4:9797:0:0:90:0] Put# [1:4:9798:0:0:14:0] Put# [1:4:9799:0:0:38:0] Put# [1:4:9800:0:0:65:0] Put# [1:4:9801:0:0:24:0] Put# [1:4:9802:0:0:35:0] Put# [1:4:9803:0:0:39:0] Put# 
[1:4:9804:0:0:80:0] Put# [1:4:9805:0:0:95:0] Put# [1:4:9806:0:0:81:0] Put# [1:4:9807:0:0:95:0] Put# [1:4:9808:0:0:75:0] Put# [1:4:9809:0:0:85:0] Put# [1:4:9810:0:0:20:0] Put# [1:4:9811:0:0:22:0] Put# [1:4:9812:0:0:31:0] Put# [1:4:9813:0:0:86:0] Put# [1:4:9814:0:0:95:0] Put# [1:4:9815:0:0:31:0] Put# [1:4:9816:0:0:32:0] Put# [1:4:9817:0:0:95:0] Put# [1:4:9818:0:0:69:0] Put# [1:4:9819:0:0:92:0] Put# [1:4:9820:0:0:28:0] Put# [1:4:9821:0:0:24:0] Put# [1:4:9822:0:0:1:0] Put# [1:4:9823:0:0:53:0] Put# [1:4:9824:0:0:53:0] Put# [1:4:9825:0:0:61:0] Put# [1:4:9826:0:0:62:0] Put# [1:4:9827:0:0:19:0] Put# [1:4:9828:0:0:92:0] Put# [1:4:9829:0:0:41:0] Put# [1:4:9830:0:0:39:0] Put# [1:4:9831:0:0:33:0] Put# [1:4:9832:0:0:17:0] Put# [1:4:9833:0:0:48:0] Put# [1:4:9834:0:0:99:0] Put# [1:4:9835:0:0:35:0] Put# [1:4:9836:0:0:10:0] Put# [1:4:9837:0:0:82:0] Put# [1:4:9838:0:0:21:0] Put# [1:4:9839:0:0:6:0] Put# [1:4:9840:0:0:80:0] Put# [1:4:9841:0:0:39:0] Put# [1:4:9842:0:0:94:0] Put# [1:4:9843:0:0:11:0] Put# [1:4:9844:0:0:57:0] Put# [1:4:9845:0:0:22:0] Put# [1:4:9846:0:0:57:0] Put# [1:4:9847:0:0:79:0] Put# [1:4:9848:0:0:80:0] Put# [1:4:9849:0:0:56:0] Put# [1:4:9850:0:0:74:0] Put# [1:4:9851:0:0:5:0] Put# [1:4:9852:0:0:42:0] Put# [1:4:9853:0:0:8:0] Put# [1:4:9854:0:0:77:0] Put# [1:4:9855:0:0:27:0] Put# [1:4:9856:0:0:51:0] Put# [1:4:9857:0:0:12:0] Put# [1:4:9858:0:0:84:0] Put# [1:4:9859:0:0:50:0] Put# [1:4:9860:0:0:2:0] Put# [1:4:9861:0:0:19:0] Put# [1:4:9862:0:0:41:0] Put# [1:4:9863:0:0:73:0] Put# [1:4:9864:0:0:100:0] Put# [1:4:9865:0:0:53:0] Put# [1:4:9866:0:0:43:0] Put# [1:4:9867:0:0:41:0] Put# [1:4:9868:0:0:47:0] Put# [1:4:9869:0:0:60:0] Put# [1:4:9870:0:0:31:0] Put# [1:4:9871:0:0:86:0] Put# [1:4:9872:0:0:12:0] Put# [1:4:9873:0:0:4:0] Put# [1:4:9874:0:0:74:0] Put# [1:4:9875:0:0:94:0] Put# [1:4:9876:0:0:50:0] Put# [1:4:9877:0:0:73:0] Put# [1:4:9878:0:0:27:0] Put# [1:4:9879:0:0:39:0] Put# [1:4:9880:0:0:72:0] Put# [1:4:9881:0:0:82:0] Put# [1:4:9882:0:0:97:0] Put# [1:4:9883:0:0:64:0] Put# [1:4:9884:0:0:20:0] Put# [1:4:9885:0:0:31:0] Put# [1:4:9886:0:0:8:0] Put# [1:4:9887:0:0:5:0] Put# [1:4:9888:0:0:28:0] Put# [1:4:9889:0:0:91:0] Put# [1:4:9890:0:0:65:0] Put# [1:4:9891:0:0:69:0] Put# [1:4:9892:0:0:28:0] Put# [1:4:9893:0:0:14:0] Put# [1:4:9894:0:0:88:0] Put# [1:4:9895:0:0:42:0] Put# [1:4:9896:0:0:89:0] Put# [1:4:9897:0:0:87:0] Put# [1:4:9898:0:0:76:0] Put# [1:4:9899:0:0:89:0] Put# [1:4:9900:0:0:50:0] Put# [1:4:9901:0:0:28:0] Put# [1:4:9902:0:0:49:0] Put# [1:4:9903:0:0:72:0] Put# [1:4:9904:0:0:70:0] Put# [1:4:9905:0:0:44:0] Put# [1:4:9906:0:0:60:0] Put# [1:4:9907:0:0:30:0] Put# [1:4:9908:0:0:100:0] Put# [1:4:9909:0:0:96:0] Put# [1:4:9910:0:0:2:0] Put# [1:4:9911:0:0:33:0] Put# [1:4:9912:0:0:26:0] Put# [1:4:9913:0:0:40:0] Put# [1:4:9914:0:0:15:0] Put# [1:4:9915:0:0:87:0] Put# [1:4:9916:0:0:17:0] Put# [1:4:9917:0:0:60:0] Put# [1:4:9918:0:0:19:0] Put# [1:4:9919:0:0:60:0] Put# [1:4:9920:0:0:26:0] Put# [1:4:9921:0:0:97:0] Put# [1:4:9922:0:0:90:0] Put# [1:4:9923:0:0:42:0] Put# [1:4:9924:0:0:51:0] Put# [1:4:9925:0:0:53:0] Put# [1:4:9926:0:0:68:0] Put# [1:4:9927:0:0:44:0] Put# [1:4:9928:0:0:12:0] Put# [1:4:9929:0:0:20:0] Put# [1:4:9930:0:0:21:0] Put# [1:4:9931:0:0:11:0] Put# [1:4:9932:0:0:86:0] Put# [1:4:9933:0:0:51:0] Put# [1:4:9934:0:0:46:0] Put# [1:4:9935:0:0:3:0] Put# [1:4:9936:0:0:40:0] Put# [1:4:9937:0:0:95:0] Put# [1:4:9938:0:0:44:0] Put# [1:4:9939:0:0:89:0] Put# [1:4:9940:0:0:38:0] Put# [1:4:9941:0:0:20:0] Put# [1:4:9942:0:0:84:0] Put# [1:4:9943:0:0:54:0] Put# [1:4:9944:0:0:2:0] Put# [1:4:9945:0:0:57:0] Put# 
[1:4:9946:0:0:86:0] Put# [1:4:9947:0:0:62:0] Put# [1:4:9948:0:0:20:0] Put# [1:4:9949:0:0:87:0] Put# [1:4:9950:0:0:87:0] Put# [1:4:9951:0:0:6:0] Put# [1:4:9952:0:0:17:0] Put# [1:4:9953:0:0:9:0] Put# [1:4:9954:0:0:31:0] Put# [1:4:9955:0:0:33:0] Put# [1:4:9956:0:0:27:0] Put# [1:4:9957:0:0:91:0] Put# [1:4:9958:0:0:77:0] Put# [1:4:9959:0:0:17:0] Put# [1:4:9960:0:0:67:0] Put# [1:4:9961:0:0:84:0] Put# [1:4:9962:0:0:61:0] Put# [1:4:9963:0:0:41:0] Put# [1:4:9964:0:0:41:0] Put# [1:4:9965:0:0:65:0] Put# [1:4:9966:0:0:8:0] Put# [1:4:9967:0:0:36:0] Put# [1:4:9968:0:0:57:0] Put# [1:4:9969:0:0:7:0] Put# [1:4:9970:0:0:53:0] Put# [1:4:9971:0:0:34:0] Put# [1:4:9972:0:0:7:0] Put# [1:4:9973:0:0:55:0] Put# [1:4:9974:0:0:95:0] Put# [1:4:9975:0:0:29:0] Put# [1:4:9976:0:0:97:0] Put# [1:4:9977:0:0:92:0] Put# [1:4:9978:0:0:6:0] Put# [1:4:9979:0:0:75:0] Put# [1:4:9980:0:0:95:0] Put# [1:4:9981:0:0:76:0] Put# [1:4:9982:0:0:34:0] Put# [1:4:9983:0:0:71:0] Put# [1:4:9984:0:0:100:0] Put# [1:4:9985:0:0:37:0] Put# [1:4:9986:0:0:41:0] Put# [1:4:9987:0:0:2:0] Put# [1:4:9988:0:0:93:0] Put# [1:4:9989:0:0:62:0] Put# [1:4:9990:0:0:56:0] Put# [1:4:9991:0:0:40:0] Put# [1:4:9992:0:0:14:0] Put# [1:4:9993:0:0:58:0] Put# [1:4:9994:0:0:29:0] Put# [1:4:9995:0:0:2:0] Put# [1:4:9996:0:0:34:0] Put# [1:4:9997:0:0:32:0] Put# [1:4:9998:0:0:35:0] Put# [1:4:9999:0:0:53:0] Put# [1:4:10000:0:0:14:0] |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> 
NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] Test command err: 2026-01-07T17:44:19.831728Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676943163512038:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:19.831796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:20.129021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:20.237987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:20.238122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:20.240798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:20.306322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:20.309134Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676943163512004:2081] 1767807859830366 != 1767807859830369 2026-01-07T17:44:20.328567Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:20.370330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:20.370357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:20.370366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:20.370460Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:20.578976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:20.779318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:20.785089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:20.853020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:20.867957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.946523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.986022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.029233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.061587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.093394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.130101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.164435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.198362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:21.233468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:22.846285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676956048415555:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.846407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676956048415547:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.846494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.846805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676956048415562:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.846898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.850147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:22.862591Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676956048415561:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:44:22.933235Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676956048415614:3099] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:23.331665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.374866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.411600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.443945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.473251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.505125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [b48b3c42-8b70e147-2f4657b4-ab002291] reply ok 2026-01-07T17:44:36.150707Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38260) <- (200 , 1208 bytes) 2026-01-07T17:44:36.150817Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38260) connection closed Http output full {"Messages":[{"MD5OfBody":"fdef397828e382cbd826fa29c1de10d4","Attributes":{"SentTimestamp":"1767807875770"},"ReceiptHandle":"CAAQnAI=","Body":"MessageBody-284","MessageId":"CAF243AE-308A-561C-BF17-1659068910C2"},{"MD5OfBody":"c5ab62c2c62758bcdc00575221cf95d1","Attributes":{"SentTimestamp":"1767807875770"},"ReceiptHandle":"CAAQnQI=","Body":"MessageBody-285","MessageId":"FF425FA9-673C-534C-8CC4-FB08061A4BDB"},{"MD5OfBody":"87ca2c7863c07d0cebeff0918336b083","Attributes":{"SentTimestamp":"1767807875770"},"ReceiptHandle":"CAAQngI=","Body":"MessageBody-286","MessageId":"6C768E3E-5676-5C06-90CC-1BDF44D7061D"},{"MD5OfBody":"430252778030489a7c3d8ca3838d7104","Attributes":{"SentTimestamp":"1767807875771"},"ReceiptHandle":"CAAQnwI=","Body":"MessageBody-287","MessageId":"36308770-7F20-506A-BDDC-9CF75E1C45C4"},{"MD5OfBody":"6cb708942d5ce9a51debb4666964ce37","Attributes":{"SentTimestamp":"1767807875771"},"ReceiptHandle":"CAAQoAI=","Body":"MessageBody-288","MessageId":"D27028A6-C515-5C13-AB3A-DA59700C77F6"},{"MD5OfBody":"7fa6a836149b1c7d27cc8d77b658df0b","Attributes":{"SentTimestamp":"1767807875771"},"ReceiptHandle":"CAAQoQI=","Body":"MessageBody-289","MessageId":"5EDB906A-FC22-50FE-8DFD-BD2858742EA9"}]} 2026-01-07T17:44:36.151455Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38266) incoming connection opened 2026-01-07T17:44:36.151533Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38266) -> (POST /Root, 100 bytes) 2026-01-07T17:44:36.151665Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38c1:2d50:387c:0:20c1:2d50:387c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 66888f8a-707c4d94-8facbf53-349d2559 2026-01-07T17:44:36.152044Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [66888f8a-707c4d94-8facbf53-349d2559] got new request from [38c1:2d50:387c:0:20c1:2d50:387c:0] database '/Root' stream '' 2026-01-07T17:44:36.152488Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [66888f8a-707c4d94-8facbf53-349d2559] [auth] Authorized successfully 2026-01-07T17:44:36.152504Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [66888f8a-707c4d94-8facbf53-349d2559] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:36.153623Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767807881153 VisibilityDeadlineMilliseconds: 1767807906153 MaxNumberOfMessages: 7 2026-01-07T17:44:36.155843Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-07T17:44:36.155875Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-07T17:44:36.155986Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 75 Topic 'topic1' partition 0 user consumer offset 290 partno 0 count 7 size 26214400 endOffset 300 max time lag 0ms effective offset 
290 2026-01-07T17:44:36.156147Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 75 added 1 blobs, size 377 count 10 last offset 290, current partition end offset: 300 2026-01-07T17:44:36.156166Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 75. Send blob request. 2026-01-07T17:44:36.156217Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 1 times before, last time 2026-01-07T17:44:35.000000Z 2026-01-07T17:44:36.156250Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 75. All 1 blobs are from cache. 2026-01-07T17:44:36.156282Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:36.156317Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2026-01-07T17:44:36.156352Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 0 cbcount 10 2026-01-07T17:44:36.156466Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:44:36.157639Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [66888f8a-707c4d94-8facbf53-349d2559] reply ok 2026-01-07T17:44:36.157814Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38266) <- (200 , 1407 bytes) Http output full {"Messages":[{"MD5OfBody":"4ebde9bfeb2c142908b6897295e27d7d","Attributes":{"SentTimestamp":"1767807875777"},"ReceiptHandle":"CAAQogI=","Body":"MessageBody-290","MessageId":"B665CFF5-0D15-5E6E-B795-7F7F031BADFC"},{"MD5OfBody":"00716a52e19ced3758e9add7738a4de6","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQowI=","Body":"MessageBody-291","MessageId":"686D0DEA-9B4B-5B7D-A517-ECDB47DF33C7"},{"MD5OfBody":"6b2cce807faa840ccd5a8f944df80bad","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQpAI=","Body":"MessageBody-292","MessageId":"A9887F98-B0B1-5F36-BEA7-95EBF1DD25E9"},{"MD5OfBody":"c59fd6ecc9a283019c9179d342110fcb","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQpQI=","Body":"MessageBody-293","MessageId":"DCE69C82-8CDA-5A5D-91F0-0AF6FFD574C6"},{"MD5OfBody":"000dd65dc815f7e13c7ab8922f0418be","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQpgI=","Body":"MessageBody-294","MessageId":"46CD018C-4816-5E0E-9483-13F4A15BAB58"},{"MD5OfBody":"042479648840e5a3c4e86196590acb75","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQpwI=","Body":"MessageBody-295","MessageId":"FAE15508-B62A-5219-9518-9937762A66B2"},{"MD5OfBody":"a11e0f7a28004b695b04e3899672981b","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQqAI=","Body":"MessageBody-296","MessageId":"95BA32E8-4312-5A6C-85D8-3477673707AA"}]} 2026-01-07T17:44:36.157932Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38266) connection closed 2026-01-07T17:44:36.158541Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38282) incoming connection opened 2026-01-07T17:44:36.158618Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38282) -> (POST /Root, 100 bytes) 2026-01-07T17:44:36.158725Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from 
[b8a4:2d50:387c:0:a0a4:2d50:387c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 177fae48-8a9d3302-4fe64203-2ade37f7 2026-01-07T17:44:36.159157Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [177fae48-8a9d3302-4fe64203-2ade37f7] got new request from [b8a4:2d50:387c:0:a0a4:2d50:387c:0] database '/Root' stream '' 2026-01-07T17:44:36.159577Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [177fae48-8a9d3302-4fe64203-2ade37f7] [auth] Authorized successfully 2026-01-07T17:44:36.159588Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [177fae48-8a9d3302-4fe64203-2ade37f7] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:36.160151Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767807881159 VisibilityDeadlineMilliseconds: 1767807906159 MaxNumberOfMessages: 8 2026-01-07T17:44:36.161096Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-07T17:44:36.161122Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-07T17:44:36.161207Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 76 Topic 'topic1' partition 0 user consumer offset 297 partno 0 count 3 size 26214400 endOffset 300 max time lag 0ms effective offset 297 2026-01-07T17:44:36.161359Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 76 added 1 blobs, size 0 count 3 last offset 290, current partition end offset: 300 2026-01-07T17:44:36.161379Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 76. Send blob request. 2026-01-07T17:44:36.161428Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 2 times before, last time 2026-01-07T17:44:36.000000Z 2026-01-07T17:44:36.161461Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 76. All 1 blobs are from cache. 2026-01-07T17:44:36.161494Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:36.161563Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 7 cbcount 10 2026-01-07T17:44:36.161649Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:44:36.161818Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2026-01-07T17:44:36.162329Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [177fae48-8a9d3302-4fe64203-2ade37f7] reply ok 2026-01-07T17:44:36.162440Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38282) <- (200 , 611 bytes) 2026-01-07T17:44:36.162541Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38282) connection closed Http output full {"Messages":[{"MD5OfBody":"ed83ae0894ecf1dcca98701cefa96b63","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQqQI=","Body":"MessageBody-297","MessageId":"A9F7CE8B-9B04-59FC-8942-BC1375C9CABC"},{"MD5OfBody":"1199cbe1edbbd44c325cfce6309d033e","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQqgI=","Body":"MessageBody-298","MessageId":"42A487A9-840A-5640-8D1B-996F67C88717"},{"MD5OfBody":"5b9e997bca262b61080f0ec85590ea89","Attributes":{"SentTimestamp":"1767807875778"},"ReceiptHandle":"CAAQqwI=","Body":"MessageBody-299","MessageId":"ABB10D4F-48AA-55E2-BEC7-BD08AF90AFDC"}]} batchSizesHistogram (9): 1: 6 2: 7 3: 8 4: 7 5: 7 6: 7 7: 7 8: 6 9: 6 2026-01-07T17:44:36.185177Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:36.185209Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:36.185234Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:36.185251Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:36.185262Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> 
TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb |90.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> KqpRm::Reduce >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> 
TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> KqpRm::ManyTasks >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] |90.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> KqpRm::DisonnectNodes >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] Test command err: 2026-01-07T17:44:35.493611Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677010806414398:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:35.494348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:35.721128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:35.726600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:35.726704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:35.768320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:35.824694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:35.850671Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-07T17:44:35.850775Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d64897003d0] Connect to grpc://localhost:4581 2026-01-07T17:44:35.870617Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d64897003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.databases.l...(truncated) } 2026-01-07T17:44:35.881419Z node 1 
:GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d64897003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:44:35.881916Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:44:35.882030Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root, user: user1@as, from ip: 2026-01-07T17:44:35.938198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> KqpRm::Reduce [GOOD] >> KqpRm::NotEnoughExecutionUnits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] Test command err: 2026-01-07T17:44:35.962767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677010992371580:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:35.963491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:36.028213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:36.255352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:36.255468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:36.259818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:36.352646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-07T17:44:36.360685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:36.408157Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.clusters.get ydb.clusters.manage ydb.clusters.monitor) 2026-01-07T17:44:36.408236Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c877a9f07d0] Connect to grpc://localhost:12179 2026-01-07T17:44:36.422918Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c877a9f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.monitor" } } result_filter: ALL_FAILED } 2026-01-07T17:44:36.446118Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c877a9f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-07T17:44:36.446542Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-07T17:44:36.447035Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> KqpRm::ManyTasks [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] |90.8%| [TA] $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2026-01-07T17:44:39.797495Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:39.798113Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3b/r3tmp/tmpveSr1I/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:39.798723Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b3b/r3tmp/tmpveSr1I/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b3b/r3tmp/tmpveSr1I/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9871563909292452853 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:39.857225Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:39.857612Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:39.873015Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:39.873135Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:39.873188Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:39.873251Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:39.873329Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:39.873355Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:39.873391Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:39.873407Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:39.873497Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:39.893843Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807879 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:39.894130Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:39.894205Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807879 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:39.894582Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:39.894798Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:39.894927Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:39.894958Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:39.895089Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807879 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:39.895272Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:39.895316Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:39.895377Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807879 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:39.895907Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:39.896021Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:39.896426Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:39.897097Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:39.897270Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:39.897432Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:39.897472Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:39.897737Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:39.897896Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:39.897998Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:39.900753Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:39.900834Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:39.900889Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:39.900962Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:39.901029Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:39.901256Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:39.901448Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task kqp-1-1-1 (1 by [1:472:2352]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2026-01-07T17:44:39.901504Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:39.901545Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:39.901585Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
|90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] Test command err: 2026-01-07T17:42:33.500329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:33.500383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:33.689153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:34.853773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:35.034235Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.034869Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.035622Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4408857285328569338 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.084784Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.085539Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.085782Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16702181088195496040 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.185555Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.186052Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.186341Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16583630385311189062 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.270758Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:35.271542Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:35.271732Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpZDrmXE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 673757066319678376 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:35.275754Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 Hu ... 
Data# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:44:32.349098Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:32.349804Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:32.350123Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8138031170529768751 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:32.388744Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:32.389641Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:32.389926Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7136076697452328950 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:32.426366Z node 148 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:32.427029Z node 148 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:32.427295Z node 148 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6075794792343123280 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:32.525231Z node 147 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:32.525891Z node 147 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:32.526142Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018a0/r3tmp/tmpaic3nW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5676343108542277162 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:32.961465Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:32.961571Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:33.068777Z node 145 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2026-01-07T17:44:36.596300Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:44:36.596409Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:36.723568Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |90.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRm::NotEnoughExecutionUnits [GOOD] |90.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2026-01-07T17:44:40.093046Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:40.093675Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b38/r3tmp/tmpZYhnKi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:40.094563Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b38/r3tmp/tmpZYhnKi/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b38/r3tmp/tmpZYhnKi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11811882237813046565 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:40.166878Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.167307Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.211785Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:40.211986Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:40.212076Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:40.212212Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:40.212370Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.212501Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:40.212582Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.212617Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:40.212785Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.234073Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.234955Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.235107Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.235693Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.235912Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.236093Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.236140Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.236282Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.236584Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.236638Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.236740Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.238985Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from 
subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:40.239129Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.239773Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.240321Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.240442Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.240698Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.240730Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:40.241021Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.241197Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.241294Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:40.243713Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.243777Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.243822Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.243858Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.243939Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:40.244201Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.244447Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.244493Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.244529Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.244560Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-2 (2 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.244593Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:472:2352])) 2026-01-07T17:44:40.244625Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.244755Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-3-3 (3 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.244791Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-3-3 (3 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.244834Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.244864Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-3-3 (3 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.244895Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:472:2352])) 2026-01-07T17:44:40.244961Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.245091Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-4-4 (4 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.245129Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-4-4 (4 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245159Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.245203Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-4-4 (4 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245236Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:472:2352])) 2026-01-07T17:44:40.245265Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 4. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.245373Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-5-5 (5 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.245399Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-5-5 (5 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245433Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.245458Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-5-5 (5 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245484Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:472:2352])) 2026-01-07T17:44:40.245511Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.245668Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-6-6 (6 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.245716Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-6-6 (6 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245746Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.245777Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-6-6 (6 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.245804Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:472:2352])) 2026-01-07T17:44:40.245843Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.245993Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-7-7 (7 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.246033Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-7-7 (7 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246068Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.246099Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-7-7 (7 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246124Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:472:2352])) 2026-01-07T17:44:40.246149Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 7. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.246257Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-8-8 (8 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.246294Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-8-8 (8 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246320Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.246344Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-8-8 (8 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246371Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:472:2352])) 2026-01-07T17:44:40.246396Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.246494Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-9-9 (9 by [1:472:2352]) priority=0 resources={0, 100} 2026-01-07T17:44:40.246519Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-9-9 (9 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246551Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:472:2352]) from queue queue_kqp_resource_manager 2026-01-07T17:44:40.246575Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-9-9 (9 by [1:472:2352]) to queue queue_kqp_resource_manager 2026-01-07T17:44:40.246622Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:472:2352])) 2026-01-07T17:44:40.246651Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-07T17:44:40.246750Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:472:2352]) (release resources {0, 100}) 2026-01-07T17:44:40.246813Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:472:2352])) 2026-01-07T17:44:40.246873Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
>> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2026-01-07T17:44:40.854309Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:40.854922Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003a93/r3tmp/tmpk5RuIF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:40.855670Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003a93/r3tmp/tmpk5RuIF/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003a93/r3tmp/tmpk5RuIF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2856024163332505848 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:40.902808Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.903164Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.917248Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:40.917465Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:40.917549Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start 
KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:40.917637Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:40.917775Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.917845Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:40.917912Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.917942Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:40.918090Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.933079Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.933327Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.933382Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.933701Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.933809Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.933899Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.933930Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.934027Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.934174Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.934203Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.934246Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } 
ExecutionUnits: 100 2026-01-07T17:44:40.934631Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:40.934724Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.935051Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.935544Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.935642Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.935766Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.935799Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:40.936058Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.936175Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.936263Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |90.8%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |90.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |90.9%| [TA] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] Test command err: 2026-01-07T17:44:18.891169Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676938149665327:2237];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:18.891761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:19.347595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:19.379884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:19.380000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:19.424477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:19.576119Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:19.590477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676938149665128:2081] 1767807858858898 != 1767807858858901 2026-01-07T17:44:19.669341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:19.715105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:19.715133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:19.715139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:19.715241Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:19.892756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:20.076440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:20.097310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:44:20.303658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:20.308987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:44:20.310903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:20.407767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.549904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.586592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.621826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.649936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.677534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.711767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.739712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.768294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:20.796137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:22.396486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955329535971:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.396486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955329535963:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.396573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.396911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676955329535978:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.397086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:22.400225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:22.416778Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676955329535977:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:22.471237Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676955329536030:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:22.883739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.004158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.041424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.076758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:23.104376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 4:40.297766Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 0 times before, last time 2026-01-07T17:44:40.000000Z 2026-01-07T17:44:40.297832Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-07T17:44:40.297859Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 4 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 100 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:44:40.297886Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:44:40.297969Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:44:40.297984Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 4. Send blob request. 
2026-01-07T17:44:40.298017Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:40.298154Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-07T17:44:40.298159Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessage] requestId [fae6482a-2eaab4cf-d1dbbc1c-6a80e768] reply ok 2026-01-07T17:44:40.298220Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 1 times before, last time 2026-01-07T17:44:40.000000Z 2026-01-07T17:44:40.298259Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2026-01-07T17:44:40.298292Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1767807880293 queuesize 0 startOffset 0 2026-01-07T17:44:40.298290Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:49324) <- (200 , 127 bytes) 2026-01-07T17:44:40.298295Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:44:40.298334Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:40.298402Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-07T17:44:40.298433Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:49324) connection closed 2026-01-07T17:44:40.298459Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 Http output full {"SequenceNumber":"0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"} 2026-01-07T17:44:40.299295Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:49340) incoming connection opened 2026-01-07T17:44:40.299366Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:49340) -> (POST /Root, 74 bytes) 2026-01-07T17:44:40.299497Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d83a:3edf:d67b:0:c03a:3edf:d67b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: a8c481e5-15702a63-4ead1aa7-6de7060b 2026-01-07T17:44:40.299941Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [a8c481e5-15702a63-4ead1aa7-6de7060b] got new request from [d83a:3edf:d67b:0:c03a:3edf:d67b:0] database '/Root' stream '' 2026-01-07T17:44:40.300375Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [a8c481e5-15702a63-4ead1aa7-6de7060b] [auth] Authorized successfully 2026-01-07T17:44:40.300427Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [a8c481e5-15702a63-4ead1aa7-6de7060b] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:40.302409Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767807900302 VisibilityDeadlineMilliseconds: 1767807910302 MaxNumberOfMessages: 1 2026-01-07T17:44:40.303383Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-07T17:44:40.303409Z node 3 :PERSQUEUE DEBUG: 
pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-07T17:44:40.303480Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:44:40.303624Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:44:40.303642Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-07T17:44:40.303676Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 2 times before, last time 2026-01-07T17:44:40.000000Z 2026-01-07T17:44:40.303703Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-07T17:44:40.303732Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:40.303772Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:44:40.303815Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-07T17:44:40.303886Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:44:40.304589Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [a8c481e5-15702a63-4ead1aa7-6de7060b] reply ok 2026-01-07T17:44:40.304714Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:49340) <- (200 , 211 bytes) 2026-01-07T17:44:40.304843Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:49340) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1767807880293"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} 2026-01-07T17:44:40.305580Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:49344) incoming connection opened 2026-01-07T17:44:40.305641Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:49344) -> (POST /Root, 80 bytes) 2026-01-07T17:44:40.305739Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7837:d2de:d67b:0:6037:d2de:d67b:0] request [DeleteMessage] url [/Root] database [/Root] requestId: bad139b2-2142b847-dcfb6c1b-61adedd5 2026-01-07T17:44:40.306071Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessage] requestId [bad139b2-2142b847-dcfb6c1b-61adedd5] got new request from [7837:d2de:d67b:0:6037:d2de:d67b:0] database '/Root' stream '' 2026-01-07T17:44:40.306476Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessage] requestId [bad139b2-2142b847-dcfb6c1b-61adedd5] [auth] Authorized successfully 2026-01-07T17:44:40.306498Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessage] requestId [bad139b2-2142b847-dcfb6c1b-61adedd5] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:40.307032Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:50: 
[72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 0 2026-01-07T17:44:40.307757Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:40.307785Z node 3 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:44:40.307811Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:44:40.307838Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:40.307879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:44:40.307932Z node 3 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 1 (startOffset 0) session 2026-01-07T17:44:40.307959Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:44:40.307975Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:44:40.307998Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:40.308053Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessage] requestId [bad139b2-2142b847-dcfb6c1b-61adedd5] reply ok 2026-01-07T17:44:40.308149Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:49344) <- (200 , 2 bytes) 2026-01-07T17:44:40.308231Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:44:40.308250Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:49344) connection closed Http output full {} 2026-01-07T17:44:40.308744Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:44:40.308802Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 1 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:44:40.308837Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:44:40.308855Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:40.308865Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:40.308879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:40.308892Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:40.308900Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:40.308937Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |90.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |90.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |90.9%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue |90.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |90.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> KqpRm::DisonnectNodes [GOOD] |90.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessage [GOOD] |91.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> PersQueueSdkReadSessionTest::SettingsValidation >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2026-01-07T17:44:40.418648Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:44:40.419091Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b33/r3tmp/tmpEpQPm8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:44:40.419594Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/003b33/r3tmp/tmpEpQPm8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/003b33/r3tmp/tmpEpQPm8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12327684890779696550 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:44:40.467244Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.467550Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T17:44:40.480418Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:474:2102] with ResourceBroker at [2:444:2101] 2026-01-07T17:44:40.480571Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:475:2103] 2026-01-07T17:44:40.480628Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:472:2352] with ResourceBroker at [1:443:2333] 2026-01-07T17:44:40.480688Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:473:2353] 2026-01-07T17:44:40.480790Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.480822Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-07T17:44:40.480869Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-07T17:44:40.480890Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-07T17:44:40.481028Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.495299Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.495587Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.495666Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.496087Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.496236Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:40.496380Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.496418Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.496515Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.496712Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-07T17:44:40.496758Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-07T17:44:40.496832Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767807880 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-07T17:44:40.497492Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-07T17:44:40.497595Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.498106Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.498699Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.498861Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.499107Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:40.499155Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:40.499451Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.499635Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-07T17:44:40.499765Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-07T17:44:41.539575Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:41.539686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-07T17:44:41.540277Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-07T17:44:41.540368Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-07T17:44:41.540474Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-07T17:44:41.541126Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:64:2076] ServerId# [1:365:2282] TabletId# 72057594037932033 PipeClientId# [2:64:2076] 2026-01-07T17:44:41.541353Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-07T17:44:41.541488Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-07T17:44:41.541896Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:492: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-07T17:44:41.541952Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:167: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:477:2105], reason: tenant updated 2026-01-07T17:44:41.542163Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:41.544355Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:41.544577Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-07T17:44:41.876322Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl >> TTabletPipeTest::TestSendAfterOpen |91.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty [GOOD] |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> TestSqsTopicHttpProxy::TestReceiveMessage >> BootstrapperTest::RestartUnavailableTablet >> TResourceBroker::TestOverusage >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TResourceBroker::TestRealUsage >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestPipeConnectToHint >> TestSqsTopicHttpProxy::TestSendMessageBatch [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> TPipeCacheTest::TestAutoConnect >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> RetryPolicy::TWriteSession_TestPolicy >> TResourceBroker::TestOverusage [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TResourceBroker::TestRealUsage [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty >> TFlatMetrics::MaximumValue3 [GOOD] >> BootstrapperTest::KeepExistingTablet >> BootstrapperTest::RestartUnavailableTablet [GOOD] |91.0%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries |91.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatMetrics::MaximumValue4 [GOOD] >> TResourceBroker::TestRandomQueue >> BootstrapperTest::UnavailableStateStorage >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] >> TResourceBroker::TestRandomQueue [GOOD] >> IncrementalBackup::BasicIndexIncrementalRestore >> TPipeCacheTest::TestAutoConnect [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes >> BootstrapperTest::DuplicateNodes |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] Test command err: 2026-01-07T17:44:34.314241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677008071949155:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:34.314314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:34.562133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:34.562217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:34.607083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:34.612341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:34.614105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:34.616092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677008071949124:2081] 1767807874312470 != 1767807874312473 2026-01-07T17:44:34.819634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:34.841002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:35.320418Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:35.858813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.779916Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7592677020956852031:2595], ActorId: [1:7592677020956852032:2595], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=YWI4M2EyZTctYmRmYTY1NmYtMmVmZGQ5NGYtNTliMDY5N2E=, TxId: 01kecrz0mebct5az7b06jmydgp 2026-01-07T17:44:37.786961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677020956852053:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.787104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.788305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677020956852065:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.788429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> BootstrapperTest::UnavailableStateStorage [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> OperationMapping::IndexBuildRejected [GOOD] >> SplitPathTests::WithDatabaseShouldFail >> OperationMapping::IndexBuildCanceled >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> OperationMapping::IndexBuildSuccess >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> OperationMapping::IndexBuildCanceled [GOOD] >> OperationMapping::IndexBuildSuccess [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] Test command err: 2026-01-07T17:44:34.558631Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677004892317076:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:34.558734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:34.775353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:34.780111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:34.780235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:34.798738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:34.852761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:34.854662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677004892317047:2081] 1767807874557061 != 1767807874557064 2026-01-07T17:44:35.026283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:35.127387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:35.568585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:36.143114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.990509Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7592677017777219958:2597], ActorId: [1:7592677017777219959:2597], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=ZDY2NTdkZjItM2JiNzhjZTgtOTU0NTRlZTctMTczMDEwYzc=, TxId: 01kecrz0tw227561a676yggsrf 2026-01-07T17:44:37.995215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677017777219981:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.995300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.996146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677017777219992:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:37.996223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.122172Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677024957071298:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:38.122599Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:38.156150Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:38.245351Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:38.250628Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:38.250706Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:38.279067Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:38.398747Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:38.977952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:38.989484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:44:39.128389Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:39.996621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:41.142924Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7592677037841974160:2594], ActorId: [2:7592677037841974161:2594], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=ODEyOTUxZmEtYmExNGU5YjAtODk0Y2U3NGItNTUyZWU4MWM=, TxId: 01kecrz3xh9nxe9cc3fytjmxen 2026-01-07T17:44:41.144337Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677037841974182:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.144398Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.144614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677037841974194:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.144641Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... waiting for client destroyed notification ... waiting for connect2 |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2026-01-07T17:44:45.327297Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2026-01-07T17:44:45.328044Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2026-01-07T17:44:45.328095Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2026-01-07T17:44:45.487632Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... 
disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... waiting for multiple state storage lookup attempts (done) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] Test command err: 2026-01-07T17:44:34.598640Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677004976899875:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:34.598685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:34.850067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:34.873803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:34.873949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:34.900521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:34.967654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:35.038062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:35.194974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:35.612500Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:36.218703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.195458Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7592677022156770039:2593], ActorId: [1:7592677022156770040:2593], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=YWVkZDlmNjEtN2FlMjNiYmYtOWI1ODY2ODMtZDA0MTQ0ZTE=, TxId: 01kecrz11bd73tt3qgbnajayds 2026-01-07T17:44:38.201254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677022156770062:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.201417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.201944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677022156770075:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.201994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.328175Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677022665823132:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:38.332057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:38.348379Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:38.469097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:38.472696Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:38.476674Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677022665823088:2081] 1767807878317120 != 1767807878317123 2026-01-07T17:44:38.512739Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:38.512844Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:38.516203Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:38.749938Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:39.186568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:39.196634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:44:39.323967Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:40.209272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] Test command err: 2026-01-07T17:44:34.650843Z 
node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677008253955459:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:34.651743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:34.864128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:34.870370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:34.870492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:34.936826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:35.009033Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:35.012864Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677008253955424:2081] 1767807874647987 != 1767807874647990 2026-01-07T17:44:35.159107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:35.187918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:35.194760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:44:35.657448Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:36.208821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.193074Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677024141026395:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:38.193154Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:38.229293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-07T17:44:38.340695Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:38.343345Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677024141026352:2081] 1767807878188050 != 1767807878188053 2026-01-07T17:44:38.350756Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:38.350848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:38.365150Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:38.390103Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:39.079993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:39.089411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:44:39.219717Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:40.103185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.232288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] Test command err: 2026-01-07T17:44:34.792485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677008454201859:2076];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:44:34.793973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:35.017914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:35.018026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:35.043304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:35.104185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:35.119108Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:35.120472Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677008454201820:2081] 1767807874774774 != 1767807874774777 2026-01-07T17:44:35.301924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:35.376581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:35.801798Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:36.324184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:36.506492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.378592Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:44:38.379679Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:38.498651Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:38.512756Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [2:7592677023089137415:2081] 1767807878287137 != 1767807878287140 2026-01-07T17:44:38.528921Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:38.529030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:38.557161Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:38.591906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:39.314823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:39.352900Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-07T17:44:40.327312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:41.588106Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7592677035974040331:2604], ActorId: [2:7592677035974040332:2604], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=MjQxYzY0YWEtNmY1MjZmYzUtZTc0YjBlZjktMTBhNGRiYWU=, TxId: 01kecrz4be53456wzxke0rw4d1 2026-01-07T17:44:41.589591Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677035974040352:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.589898Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.590240Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677035974040365:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:41.590329Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2026-01-07T17:44:46.109725Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:46.109815Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:46.110445Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:46.110489Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2026-01-07T17:44:46.110955Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:46.110989Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-07T17:44:46.111834Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-07T17:44:46.111897Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.123966s 2026-01-07T17:44:46.112008Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-07T17:44:46.112038Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-07T17:44:46.318163Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:46.318831Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:221:2097] 2026-01-07T17:44:46.319317Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:46.319364Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2026-01-07T17:44:44.940672Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-8 (8 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.940744Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-9 (9 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.940785Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-10 (10 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.940893Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-14 (14 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941018Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning waiting task 'task-18 (18 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941095Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-21 (21 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941205Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-27 (27 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941328Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-32 (32 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941371Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-33 (33 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941394Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-34 (34 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941493Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-42 (42 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941593Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-49 (49 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941626Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-51 (51 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941647Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-52 (52 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941668Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-53 (53 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941718Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-55 (55 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941746Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-56 (56 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941821Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-62 (62 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941842Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-63 (63 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941904Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-67 (67 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941932Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-68 (68 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.941967Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-69 (69 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942015Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-71 (71 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942087Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-74 (74 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942146Z node 
2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-77 (77 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942169Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-78 (78 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942204Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-80 (80 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942228Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-81 (81 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942463Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-99 (99 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942571Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-108 (108 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942684Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-113 (113 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942794Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-119 (119 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.942920Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-124 (124 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943200Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-137 (137 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943404Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-155 (155 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943466Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-160 (160 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943564Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-167 (167 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943600Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-169 (169 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943624Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-170 (170 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943789Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-183 (183 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943851Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-186 (186 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943895Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-189 (189 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943953Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-192 (192 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.943980Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-193 (193 by [2:104:2137])' of unknown type 
'wrong' to default queue 2026-01-07T17:44:44.944022Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-194 (194 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944065Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-196 (196 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944131Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-200 (200 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944154Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-201 (201 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944269Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-209 (209 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944324Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-213 (213 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944379Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-217 (217 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944419Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-218 (218 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944486Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-223 (223 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944621Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-234 (234 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944648Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-235 (235 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944708Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-239 (239 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944759Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-240 (240 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944795Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-241 (241 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944836Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-243 (243 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.944878Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-246 (246 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945022Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-256 (256 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945118Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-261 (261 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945344Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-277 (277 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945381Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: 
Assigning waiting task 'task-279 (279 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945461Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-284 (284 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945497Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-286 (286 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.945716Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-294 ... R ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-467 (467 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982282Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-509 (509 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982309Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-517 (517 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982342Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-520 (520 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982367Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-525 (525 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982413Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-543 (543 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982457Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-581 (581 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982532Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-620 (620 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982598Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-629 (629 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982650Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-635 (635 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982686Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-662 (662 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982772Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-706 (706 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982794Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-731 (731 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982871Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-736 (736 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982899Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-748 (748 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982948Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-755 (755 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.982984Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-762 (762 by [2:104:2137])' of 
unknown type 'wrong' to default queue 2026-01-07T17:44:44.983055Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-802 (802 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983171Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-893 (893 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983211Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-920 (920 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983262Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-950 (950 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983329Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-977 (977 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983357Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-982 (982 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983425Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-989 (989 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983477Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-18 (18 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983523Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-42 (42 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983571Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-53 (53 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983599Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-68 (68 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983623Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-80 (80 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983757Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-183 (183 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983787Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-194 (194 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983905Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-279 (279 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983933Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-286 (286 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983957Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-294 (294 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.983992Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-303 (303 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984020Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-319 (319 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984087Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 
'task-338 (338 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984120Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-340 (340 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984146Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-356 (356 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984169Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-361 (361 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984228Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-375 (375 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984266Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-392 (392 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984314Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-434 (434 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984344Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-439 (439 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984393Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-468 (468 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984432Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-474 (474 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984476Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-490 (490 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984505Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-516 (516 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984544Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-522 (522 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984605Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-547 (547 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984710Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-661 (661 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984748Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-663 (663 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984794Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-676 (676 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984819Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-681 (681 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984842Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-690 (690 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984858Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-715 (715 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984949Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning in-fly task 'task-757 (757 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.984992Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-777 (777 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985033Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-779 (779 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985057Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-780 (780 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985080Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-793 (793 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985141Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-822 (822 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985176Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-844 (844 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985199Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-873 (873 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985246Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-898 (898 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985270Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-932 (932 by [2:104:2137])' of unknown type 'wrong' to default queue 2026-01-07T17:44:44.985323Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-993 (993 by [2:104:2137])' of unknown type 'wrong' to default queue |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |91.0%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> BasicUsage::CreateTopicWithCustomName [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestShutdown |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] |91.0%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestPipeWithVersionInfo >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TResourceBroker::TestCounters >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> TTabletPipeTest::TestOpen >> TabletState::ExplicitUnsubscribe >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> TTabletPipeTest::TestTwoNodes >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TResourceBroker::TestAutoTaskId >> TTabletPipeTest::TestTwoNodes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> TTabletResolver::TabletResolvePriority [GOOD] >> TResourceBroker::TestCounters [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen >> TabletState::ExplicitUnsubscribe [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TTabletResolver::NodeProblem >> TTabletPipeTest::TestOpen [GOOD] >> TResourceBroker::TestAutoTaskId [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> TTabletPipeTest::TestInterconnectSession |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TResourceBroker::TestChangeTaskType >> TabletState::ImplicitUnsubscribeOnDisconnect >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TTabletResolver::NodeProblem [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> 
TTabletPipeTest::TestTwoNodes [GOOD] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TResourceBroker::TestErrors >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TResourceBroker::TestErrors [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2026-01-07T17:44:48.794897Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2026-01-07T17:44:48.795139Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2026-01-07T17:44:48.795357Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] Test command err: 2026-01-07T17:44:37.692682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677021015205628:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:37.693759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:37.965171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:37.965327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:38.023816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:38.091591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:38.092828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:38.094292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677021015205459:2081] 1767807877674751 != 1767807877674754 2026-01-07T17:44:38.144779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:38.144812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:38.144820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:38.144902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:38.335262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:38.353700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:38.362739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:44:38.601028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:38.607513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:44:38.609470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:38.638712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:44:38.694085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:38.718017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.869361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.914914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.966993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.005708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.043945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.084053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.132574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.171996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.211325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.797618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677033900108996:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.797618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677033900109004:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.797741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.798050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677033900109011:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.798133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.801678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:40.812387Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677033900109010:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:40.872106Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677033900109063:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:41.203104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:41.231747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:41.258145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:41.282308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... Status: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:48.521019Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-07T17:44:48.521094Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 15ms 2026-01-07T17:44:48.521104Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.521373Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:48.521789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677067452419399:2444], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.521859Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.522053Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:48.522087Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 15ms 2026-01-07T17:44:48.522491Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: 
"Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:48.522531Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-07T17:44:48.522636Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 16ms 2026-01-07T17:44:48.523199Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:48.647389Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677067452419339:2437]: Pool not found 2026-01-07T17:44:48.647532Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:44:48.908859Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677067452419337:2436]: Pool not found 2026-01-07T17:44:48.909091Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2026-01-07T17:44:48.911290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677067452419444:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.911318Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592677067452419445:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:44:48.911372Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.911691Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677067452419448:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.911781Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:49.157082Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677067452419442:2453]: Pool not found 2026-01-07T17:44:49.157357Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:44:49.314877Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677050272547783:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:49.314952Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... waiting for connect2 >> TResourceBroker::TestExecutionStat >> TResourceBroker::TestExecutionStat [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> BootstrapperTest::LoneBootstrapper >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2026-01-07T17:44:50.038884Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2026-01-07T17:44:50.052416Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] 
Activate 2026-01-07T17:44:50.057158Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2026-01-07T17:44:50.060370Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:132:2157] 2026-01-07T17:44:50.060429Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:132:2157] 2026-01-07T17:44:50.060737Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:132:2157] 2026-01-07T17:44:50.060809Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:132:2157] 2026-01-07T17:44:50.060947Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:132:2157] 2026-01-07T17:44:50.060987Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:132:2157] 2026-01-07T17:44:50.061068Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:132:2157] 2026-01-07T17:44:50.061258Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:132:2157] Type# 269877249 Reason# ActorUnknown 2026-01-07T17:44:50.061467Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:135:2159] 2026-01-07T17:44:50.061511Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:135:2159] 2026-01-07T17:44:50.061567Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:135:2159] 2026-01-07T17:44:50.061603Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:135:2159] 2026-01-07T17:44:50.061665Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:135:2159] 2026-01-07T17:44:50.061696Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:135:2159] 2026-01-07T17:44:50.061754Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:135:2159] 2026-01-07T17:44:50.061863Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:135:2159] Type# 269877249 Reason# ActorUnknown 2026-01-07T17:44:50.061991Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:137:2161] 2026-01-07T17:44:50.062018Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:137:2161] 2026-01-07T17:44:50.062080Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:137:2161] 2026-01-07T17:44:50.062113Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:137:2161] 2026-01-07T17:44:50.062163Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:137:2161] 2026-01-07T17:44:50.062192Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:137:2161] 2026-01-07T17:44:50.062253Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:137:2161] 2026-01-07T17:44:50.062333Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:137:2161] Type# 269877249 Reason# ActorUnknown |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:105:2138] ... blocking block result NO_GROUP for [1:106:2138] ... blocking block result NO_GROUP for [1:107:2138] ... blocking block result NO_GROUP for [1:108:2138] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] Test command err: 2026-01-07T17:44:37.036500Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677019149456725:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:37.036525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:37.302983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:37.334273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:37.334361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:37.377056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:37.455825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:37.545122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:37.545153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:37.545162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:37.545251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:37.607228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:37.751980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:37.759268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:44:38.038364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:38.045559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:44:38.049293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:38.053507Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:38.154608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.295054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.340557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.386539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-07T17:44:38.391727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.435907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.499937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.574402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.614001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.650701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.714382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.389864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677032034360223:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.389864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677032034360229:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.389974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.390275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677032034360238:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.390383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.393845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:40.407302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677032034360237:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:44:40.483435Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677032034360290:3105] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:40.818195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.847100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.874201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.898824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.930665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... 07][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:44:50.918909Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:44:50.919061Z node 2 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767807890961, TxId 281474976710690 2026-01-07T17:44:50.919085Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.919095Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:44:50.919105Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.919124Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:44:50.919261Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710690 2026-01-07T17:44:50.919283Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:44:50.919305Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:44:50.919319Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.919589Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:44:50.920500Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:44:50.920658Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-07T17:44:50.920675Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-07T17:44:50.920785Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:44:50.920813Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.920825Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.920835Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.920849Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.920859Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.920876Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:44:50.921266Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 
SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-07T17:44:50.921338Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-07T17:44:50.921455Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:44:50.922318Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:27597 { status: SUCCESS, issues: }consumer 2026-01-07T17:44:50.941297Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:45318) incoming connection opened 2026-01-07T17:44:50.941374Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:45318) -> (POST /Root, 76 bytes) 2026-01-07T17:44:50.941502Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18a6:d7ee:677b:0:a6:d7ee:677b:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: e6e19cca-fdfe7073-5b522d6d-a3eb6bdf 2026-01-07T17:44:50.941827Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [e6e19cca-fdfe7073-5b522d6d-a3eb6bdf] got new request from [18a6:d7ee:677b:0:a6:d7ee:677b:0] database '/Root' stream '' 2026-01-07T17:44:50.942257Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-07T17:44:50.942315Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-07T17:44:50.942342Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-07T17:44:50.942369Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-07T17:44:50.942394Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-07T17:44:50.942420Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-07T17:44:50.942455Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-07T17:44:50.950797Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" 
retryable:0 2026-01-07T17:44:50.950946Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:50.951885Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:50.954422Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-07T17:44:50.954480Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:50.954507Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:50.954531Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:50.954604Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-07T17:44:50.954691Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [e6e19cca-fdfe7073-5b522d6d-a3eb6bdf] [auth] Authorized successfully 2026-01-07T17:44:50.954740Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [e6e19cca-fdfe7073-5b522d6d-a3eb6bdf] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:50.955088Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [e6e19cca-fdfe7073-5b522d6d-a3eb6bdf] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.EmptyBatchRequest 2026-01-07T17:44:50.955184Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [e6e19cca-fdfe7073-5b522d6d-a3eb6bdf] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 
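A minimal reproduction sketch for the scenario exercised by TestSqsTopicHttpProxy::TestSendMessageBatchEmpty: an SQS SendMessageBatch call with an empty Entries list sent to the YDB SQS-compatible HTTP proxy. The path, X-Amz-Target header, JSON body, and the expected 400 AWS.SimpleQueueService.EmptyBatchRequest reply are taken from the request/response dump that follows in the log; the proxy host/port and the Python client code itself are assumptions, not part of the test suite.

import json
import urllib.error
import urllib.request

# Assumed proxy endpoint; the log only records "POST /Root" plus a Host header.
ENDPOINT = "http://localhost:8080/Root"

# Body copied from the captured request: a batch request with no entries.
body = json.dumps({"QueueUrl": "/v1/5//Root/6/topic1/8/consumer", "Entries": []}).encode()

req = urllib.request.Request(
    ENDPOINT,
    data=body,
    method="POST",
    headers={
        "X-Amz-Target": "AmazonSQS.SendMessageBatch",
        "Content-Type": "application/json",
    },
)

try:
    urllib.request.urlopen(req)
except urllib.error.HTTPError as e:
    # Per the log, the proxy answers 400 AWS.SimpleQueueService.EmptyBatchRequest with body:
    # {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."}
    print(e.code, e.read().decode())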
2026-01-07T17:44:50.955349Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45318) <- (400 AWS.SimpleQueueService.EmptyBatchRequest, 112 bytes) 2026-01-07T17:44:50.955445Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:45318) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ ] } 2026-01-07T17:44:50.955482Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:45318) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: e6e19cca-fdfe7073-5b522d6d-a3eb6bdf Content-Type: application/x-amz-json-1.1 Content-Length: 112 2026-01-07T17:44:50.955588Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45318) connection closed Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2026-01-07T17:44:51.018515Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:51.018558Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.018573Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:51.018586Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.018595Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:51.118871Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:51.118900Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.118912Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:51.118927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.118937Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2026-01-07T17:44:50.239143Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.239398Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:215:2139] followers: 0 2026-01-07T17:44:50.239469Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:215:2139] 2026-01-07T17:44:50.239754Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward 
TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.239960Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:221:2143] followers: 0 2026-01-07T17:44:50.240031Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:221:2143] 2026-01-07T17:44:50.250685Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:215:2139] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.250775Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:215:2139] 2026-01-07T17:44:50.250993Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:221:2143] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.251045Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:221:2143] 2026-01-07T17:44:50.251258Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 4 2026-01-07T17:44:50.251313Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:215:2139] by nodeId 2026-01-07T17:44:50.251376Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:215:2139] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.251424Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:44:50.251687Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:231:2096] followers: 0 2026-01-07T17:44:50.251756Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:231:2096] 2026-01-07T17:44:50.252139Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:221:2143] by nodeId 2026-01-07T17:44:50.252195Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:221:2143] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.252251Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:44:50.252494Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:237:2098] followers: 0 2026-01-07T17:44:50.252568Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2026-01-07T17:44:50.256950Z node 1 :TABLET_RESOLVER DEBUG: 
tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2026-01-07T17:44:50.257038Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:231:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.257094Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:231:2096] 2026-01-07T17:44:50.257321Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.257373Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2026-01-07T17:44:50.265280Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 6 2026-01-07T17:44:50.265379Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [2:231:2096] by nodeId 2026-01-07T17:44:50.265458Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:231:2096] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.265510Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:44:50.265927Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:249:2096] followers: 0 2026-01-07T17:44:50.266017Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:249:2096] 2026-01-07T17:44:50.266647Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.266719Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2026-01-07T17:44:50.266989Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2026-01-07T17:44:50.267059Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:249:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.267104Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:249:2096] 2026-01-07T17:44:50.267348Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:237:2098] by nodeId 2026-01-07T17:44:50.267436Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] (known 
problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:44:50.267481Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:44:50.267826Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:255:2098] followers: 0 2026-01-07T17:44:50.267894Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:255:2098] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for boot2 ... waiting for connect2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] Test command err: 2026-01-07T17:44:37.130588Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677018100907729:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:37.130662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:37.182489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:37.471732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:37.471862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:37.506704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:37.607930Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:37.608866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:37.617053Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677018100907706:2081] 1767807877129612 != 1767807877129615 2026-01-07T17:44:37.665039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:37.665069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:37.665086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:37.665166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:37.804821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:37.904577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:37.933164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:44:38.159518Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:38.179018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:38.186293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:44:38.188396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:38.209752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:44:38.263767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.444214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.532558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.581568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.618905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.657486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.694411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.730986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.762967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.803165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.386371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677030985811247:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.386407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677030985811255:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.386502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.386877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677030985811262:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.387023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:40.391274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:40.403359Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677030985811261:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-07T17:44:40.498720Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677030985811314:3105] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:40.869218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.896431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.927615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.956362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose ... EUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.014189Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.014219Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.014231Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.014246Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.014261Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.111624Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 
MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 507 2026-01-07T17:44:50.114794Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.114821Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.114830Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.114843Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.114853Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.216347Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.216384Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.216412Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.216431Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.216440Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.316735Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.316763Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.316775Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.316792Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.316803Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.417285Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.417314Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.417325Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.417343Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.417355Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.517403Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.517447Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T17:44:50.517457Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.517482Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.517490Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.617929Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.617962Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.617973Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.617999Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.618010Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.718250Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.718291Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.718302Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.718318Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.718326Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.818491Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.818530Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.818544Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.818564Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.818577Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.918829Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.918860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.918872Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.918891Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.918903Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] 
Try persist 2026-01-07T17:44:51.019171Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:51.019200Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.019218Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:51.019230Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.019237Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:51.113464Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 103 2026-01-07T17:44:51.113649Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [34f259bc-d544d1cf-aa23fcaa-5514af05] reply ok 2026-01-07T17:44:51.113892Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:36002) <- (200 , 2 bytes) 2026-01-07T17:44:51.113981Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:36002) connection closed Http output full {} 2026-01-07T17:44:51.121196Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:51.121231Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.121240Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:51.121253Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:51.121262Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2026-01-07T17:43:45.411072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676796393687560:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:45.412247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:45.714823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:45.714983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:45.789082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592676796393687532:2081] 1767807825404588 != 1767807825404591 2026-01-07T17:43:45.801213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:45.833790Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:46.197259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.197278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.197289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.197379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.267576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.277984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:43:46.422337Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:49.468180Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.581681Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:43:49.581729Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:43:49.581747Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T17:43:49.587138Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.587170Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Creating table 2026-01-07T17:43:49.587229Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.587398Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. 
Describe result: PathErrorUnknown 2026-01-07T17:43:49.587411Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. Creating table 2026-01-07T17:43:49.587437Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-07T17:43:49.587726Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Describe result: PathErrorUnknown 2026-01-07T17:43:49.587733Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Creating table 2026-01-07T17:43:49.587752Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.601518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.603565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.606918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:49.614092Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-07T17:43:49.614155Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. Subscribe on create table tx: 281474976710659 2026-01-07T17:43:49.614862Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-07T17:43:49.614897Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Subscribe on create table tx: 281474976710660 2026-01-07T17:43:49.616851Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. 
SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-07T17:43:49.616869Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Subscribe on create table tx: 281474976710658 2026-01-07T17:43:49.617086Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. Subscribe on tx: 281474976710659 registered 2026-01-07T17:43:49.617652Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Subscribe on tx: 281474976710660 registered 2026-01-07T17:43:49.621792Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Subscribe on tx: 281474976710658 registered 2026-01-07T17:43:49.799207Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-07T17:43:49.871737Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-07T17:43:49.871797Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Column diff is empty, finishing 2026-01-07T17:43:49.873072Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-07T17:43:49.875601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.878606Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-07T17:43:49.878621Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7592676813573557560:2494] Owner: [1:7592676813573557557:2491]. Successful alter request: ExecComplete 2026-01-07T17:43:49.880215Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-07T17:43:49.880318Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7592676813573557559:2493] Owner: [1:7592676813573557557:2491]. Request: create. Transaction completed: 281474976710659. Doublechecking... 
2026-01-07T17:43:49.970141Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-07T17:43:49.970255Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Column diff is empty, finishing 2026-01-07T17:43:49.970391Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7592676813573557558:2492] Owner: [1:7592676813573557557:2491]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-07T17:43:49.971365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.971965Z node 1 :K ... D (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-07T17:44:49.949264Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 58, targetId: [4:7592677069683726785:2611] 2026-01-07T17:44:49.949314Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 58 timeout: 300.000000s actor id: [4:7592677069683726787:3116] 2026-01-07T17:44:49.957153Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 58, sender: [4:7592677069683726786:2612], selfId: [4:7592677026734051992:2266], source: [4:7592677069683726785:2611] 2026-01-07T17:44:49.957415Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, TxId: 01kecrzcgxfkehbfr59c9cmey0 2026-01-07T17:44:49.957937Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4237: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, Do finalization with status UNAVAILABLE, exec status: EXEC_STATUS_ABORTED, finalization status (applicate effect: 0): 1, issues: {
: Error: Lease expired }, retry deadline (wait retry: 0): 2026-01-07T17:44:49.957814Z, lease state: 1 2026-01-07T17:44:49.958421Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, RunDataQuery with SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, TxId: 01kecrzcgxfkehbfr59c9cmey0, text: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $stats AS JsonDocument; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; DECLARE $retry_state AS JsonDocument; DECLARE $retry_deadline AS Timestamp; DECLARE $lease_state AS Int32; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, end_ts = CurrentUtcTimestamp(), stats = $stats, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL), retry_state = $retry_state WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2026-01-07T17:44:49.958914Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 59, targetId: [4:7592677069683726785:2611] 2026-01-07T17:44:49.958949Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 59 timeout: 300.000000s actor id: [4:7592677069683726806:3123] 2026-01-07T17:44:49.973748Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 59, sender: [4:7592677069683726805:2618], selfId: [4:7592677026734051992:2266], source: [4:7592677069683726785:2611] 2026-01-07T17:44:49.974208Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, TxId: 2026-01-07T17:44:49.974310Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, TxId: 2026-01-07T17:44:49.974345Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726781:2608], ActorId: [4:7592677069683726782:2609], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-07T17:44:49.974398Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [4:7592677069683726780:2607], ActorId: [4:7592677069683726781:2608], TraceId: ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [4:7592677069683726782:2609] SUCCESS 2026-01-07T17:44:49.974502Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7592677065388759349:3058] ActorId: [4:7592677069683726687:3076] Database: /dc-1 ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-07T17:44:49.974542Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7592677065388759349:3058] ActorId: [4:7592677069683726687:3076] Database: /dc-1 ExecutionId: 1d2c3547-79eb17bd-600bf217-dad9183a. Reply success 2026-01-07T17:44:49.974599Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=YWJjMzczNWEtYmZiYzkyNzYtMjFkMTE0NzYtMzc1Zjc0YTI=, workerId: [4:7592677069683726785:2611], local sessions count: 1 2026-01-07T17:44:49.974646Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4833: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7592677065388759329:3048] ActorId: [4:7592677065388759349:3058]. Lease check #0 [4:7592677069683726690:3078] successfully completed, OperationsToCheck: 0 2026-01-07T17:44:49.974680Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7592677065388759329:3048] ActorId: [4:7592677065388759349:3058]. Finish, success: 1, issues: 2026-01-07T17:44:49.974735Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2026-01-07T17:44:50.045684Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrzckx4b5qepw7h5xsh3qb, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=Nzg2OWVkNy0zZDc5ZDYwNS03MTRhOTI3ZC04ZjYwMWQwYQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 60, targetId: [4:7592677061093791903:2537] 2026-01-07T17:44:50.045746Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 60 timeout: 300.000000s actor id: [4:7592677073978694126:3131] 2026-01-07T17:44:50.215103Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrzckx4b5qepw7h5xsh3qb", Forwarded response to sender actor, requestId: 60, sender: [4:7592677073978694125:2623], selfId: [4:7592677026734051992:2266], source: [4:7592677061093791903:2537] 2026-01-07T17:44:50.229336Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecrzcsm9svy9cszrrp5f67m, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=Nzg2OWVkNy0zZDc5ZDYwNS03MTRhOTI3ZC04ZjYwMWQwYQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 61, targetId: [4:7592677061093791903:2537] 2026-01-07T17:44:50.229393Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 61 timeout: 300.000000s actor id: [4:7592677073978694143:3138] 2026-01-07T17:44:50.695412Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:94: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2026-01-07T17:44:50.695455Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:109: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.776376s 2026-01-07T17:44:51.066298Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecrzcsm9svy9cszrrp5f67m", Forwarded response to sender actor, requestId: 61, sender: [4:7592677073978694142:2627], selfId: [4:7592677026734051992:2266], source: [4:7592677061093791903:2537] 2026-01-07T17:44:51.075310Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=Nzg2OWVkNy0zZDc5ZDYwNS03MTRhOTI3ZC04ZjYwMWQwYQ==, workerId: [4:7592677061093791903:2537], local sessions count: 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] Test command err: 2026-01-07T17:44:36.539048Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677015986565829:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:36.539148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:36.778882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:36.778993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:36.848809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:36.851506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:36.936060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:36.937809Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677015986565806:2081] 1767807876537260 != 1767807876537263 2026-01-07T17:44:36.996204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:36.996232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:36.996241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:36.996358Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:37.005025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:37.219263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:37.407395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:37.412802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:44:37.417360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:37.436816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:44:37.488805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.548515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:37.651048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.706583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.770896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.808702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.850363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.890626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.951511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.997839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.050192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.807495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677028871469349:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:39.807672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:39.808079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677028871469361:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:39.808115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677028871469362:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:39.808146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:39.811934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:39.826637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677028871469365:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:39.906977Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677028871469416:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:40.257671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.289875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.318721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.350223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:40.380401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
e { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-07T17:44:47.691849Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677060650245062:2436]: Pool not found 2026-01-07T17:44:47.692003Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-07T17:44:47.884970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677060650245073:2438]: Pool not found 2026-01-07T17:44:47.885127Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-07T17:44:47.886968Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677060650245175:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.886984Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7592677060650245176:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-07T17:44:47.887020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.887212Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677060650245179:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.887272Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:48.100829Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7592677060650245173:2453]: Pool not found 2026-01-07T17:44:48.101117Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-07T17:44:48.254399Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677043470373526:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:48.254482Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:44:48.546092Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:42734) incoming connection opened 2026-01-07T17:44:48.546195Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:42734) -> (POST /Root, 52 bytes) 2026-01-07T17:44:48.546330Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d805:a6b:b77b:0:c005:a6b:b77b:0] request [SendMessage] url [/Root] database [/Root] requestId: e7776324-4d421e35-f7f39cc-e7bd607 2026-01-07T17:44:48.546710Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [e7776324-4d421e35-f7f39cc-e7bd607] got new request from [d805:a6b:b77b:0:c005:a6b:b77b:0] database '/Root' stream '' 2026-01-07T17:44:48.547102Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-07T17:44:48.547186Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-07T17:44:48.547237Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-07T17:44:48.547272Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-07T17:44:48.547315Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-07T17:44:48.547348Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-07T17:44:48.547369Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-07T17:44:48.557033Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.557090Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.559371Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.559449Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-07T17:44:48.560231Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: 
Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.560279Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.560340Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-07T17:44:48.560416Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-07T17:44:48.560547Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [e7776324-4d421e35-f7f39cc-e7bd607] [auth] Authorized successfully 2026-01-07T17:44:48.560610Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [e7776324-4d421e35-f7f39cc-e7bd607] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:48.560983Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [e7776324-4d421e35-f7f39cc-e7bd607] Not retrying GRPC response. Code: 400, Error: MissingParameter 2026-01-07T17:44:48.561095Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [e7776324-4d421e35-f7f39cc-e7bd607] reply with status: STATUS_UNDEFINED message: No QueueUrl parameter. 2026-01-07T17:44:48.561247Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:42734) <- (400 MissingParameter, 64 bytes) 2026-01-07T17:44:48.561338Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:42734) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"", "MessageBody":"MessageBody-0" } 2026-01-07T17:44:48.561383Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:42734) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: e7776324-4d421e35-f7f39cc-e7bd607 Content-Type: application/x-amz-json-1.1 Content-Length: 64 Http output full {"__type":"MissingParameter","message":"No QueueUrl parameter."} 2026-01-07T17:44:48.561522Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:42734) connection closed 2026-01-07T17:44:48.562259Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:42742) incoming connection opened 2026-01-07T17:44:48.562352Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:42742) -> (POST /Root, 100 bytes) 2026-01-07T17:44:48.562496Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b8cb:96b:b77b:0:a0cb:96b:b77b:0] request [SendMessage] url [/Root] database [/Root] requestId: d1500e83-3ba8b078-db56eb2a-31e20367 2026-01-07T17:44:48.562815Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [d1500e83-3ba8b078-db56eb2a-31e20367] got new request from [b8cb:96b:b77b:0:a0cb:96b:b77b:0] database '/Root' stream '' 2026-01-07T17:44:48.563218Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [d1500e83-3ba8b078-db56eb2a-31e20367] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-07T17:44:48.563210Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [d1500e83-3ba8b078-db56eb2a-31e20367] [auth] Authorized successfully 2026-01-07T17:44:48.564486Z node 2 
:HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [d1500e83-3ba8b078-db56eb2a-31e20367] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.NonExistentQueue 2026-01-07T17:44:48.564575Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [d1500e83-3ba8b078-db56eb2a-31e20367] reply with status: STATUS_UNDEFINED message: You do not have access or the '/Root/ExampleQueueName' does not exist 2026-01-07T17:44:48.564682Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:42742) <- (400 AWS.SimpleQueueService.NonExistentQueue, 134 bytes) 2026-01-07T17:44:48.564749Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:42742) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/16/ExampleQueueName/13/user_consumer", "MessageBody":"MessageBody-0" } 2026-01-07T17:44:48.564786Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:42742) Response: HTTP/1.1 400 AWS.SimpleQueueService.NonExistentQueue Connection: close x-amzn-requestid: d1500e83-3ba8b078-db56eb2a-31e20367 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-07T17:44:48.564906Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:42742) connection closed Http output full {"__type":"AWS.SimpleQueueService.NonExistentQueue","message":"You do not have access or the '/Root/ExampleQueueName' does not exist"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] Test command err: 2026-01-07T17:44:35.886058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677012252812241:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:35.886110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:36.120810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:36.132742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:36.136994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:36.208533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:36.275405Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:36.277150Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677012252812138:2081] 1767807875881711 != 1767807875881714 2026-01-07T17:44:36.340397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:36.365339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:36.365371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:36.365386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:36.365465Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:36.546593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:36.718570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:44:36.726158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:36.744672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:44:36.801460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:36.892966Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:36.951193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:36.993511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-07T17:44:36.998493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.055245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.145240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.200430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.236250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.280081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.315462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:37.352128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:38.991715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677025137715691:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.991733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677025137715680:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.991858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.992216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677025137715695:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.992258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:38.996694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:39.007622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677025137715694:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-07T17:44:39.112486Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677029432683043:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:39.509661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.543907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.578114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.614690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:39.650728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
on and tx pending commits 2026-01-07T17:44:49.598191Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.598198Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:49.698523Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:49.698559Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.698575Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:49.698592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.698601Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:49.798872Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:49.798903Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.798914Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:49.798931Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.798940Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:49.899214Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:49.899245Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.899255Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:49.899271Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.899281Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:49.999598Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:49.999639Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.999650Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:49.999665Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:49.999676Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.099944Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and 
tx events 2026-01-07T17:44:50.099974Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.099999Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.100023Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.100033Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.200296Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.200336Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.200347Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.200364Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.200375Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.301099Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.301131Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.301144Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.301163Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.301174Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.401934Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.401970Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.401982Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.401998Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.402027Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.502328Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.502354Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.502365Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.502387Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 
0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.502398Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-07T17:44:50.593506Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 1 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 44 2026-01-07T17:44:50.594461Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-07T17:44:50.594491Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-07T17:44:50.594595Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 6 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-07T17:44:50.594765Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 6 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-07T17:44:50.594784Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 6. Send blob request. 2026-01-07T17:44:50.594859Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 3 times before, last time 2026-01-07T17:44:47.000000Z 2026-01-07T17:44:50.594895Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 6. All 1 blobs are from cache. 2026-01-07T17:44:50.594942Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:44:50.594975Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-07T17:44:50.595030Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-07T17:44:50.595157Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:44:50.595813Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [79840c5-2a33a96a-bb6f7c4d-203efc42] reply ok 2026-01-07T17:44:50.596063Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:36516) <- (200 , 211 bytes) 2026-01-07T17:44:50.596212Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:36516) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1767807887624"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} jsonReceived = { "Messages": [ { "Attributes": { "SentTimestamp":"1767807887624" }, "Body":"MessageBody-0", "MD5OfBody":"94a29778a1f1f41bf68142847b2e6106", "MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125", "ReceiptHandle":"CAAQAA==" } ] } 2026-01-07T17:44:50.602711Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:44:50.602741Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.602752Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:44:50.602770Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:44:50.602780Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TPipeCacheTest::TestIdleRefresh >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries >> ConfiguredTabletBootstrapperTest::BasicInitialization >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TResourceBroker::TestResubmitTask >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TTabletPipeTest::TestSendAfterReboot >> TTabletLabeledCountersAggregator::SimpleAggregation >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TabletState::SeqNoSubscriptionReplace >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TResourceBrokerInstant::Test >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TResourceBroker::TestUpdateCookie >> TTabletPipeTest::TestConsumerSidePipeReset >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestRewriteSameNode |91.2%| [TA] $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestSendAfterReboot [GOOD] |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletPipeTest::TestSendBeforeBootTarget >> TResourceBrokerInstant::Test [GOOD] >> TResourceBrokerInstant::TestErrors >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TabletState::SeqNoSubscriptionReplace [GOOD] >> TPipeCacheTest::TestTabletNode [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } 
Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> GroupWriteTest::WriteHardRateDispatcher [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestConnectReject >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> ConfiguredTabletBootstrapperTest::BasicInitialization [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: 2026-01-07T17:44:55.769006Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.007826s ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... 
waiting for connect11 |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2026-01-07T17:44:57.746305Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:123:2150] sender: [1:125:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:159:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:163:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:123:2150] sender: [1:166:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:168:2057] recipient: [1:167:2178] Leader for TabletID 9437185 is [1:169:2179] sender: [1:170:2057] recipient: [1:167:2178] Leader for TabletID 9437185 is [1:169:2179] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:202:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:205:2057] recipient: [1:204:2202] Leader for TabletID 9437184 is [1:206:2203] sender: [1:207:2057] recipient: [1:204:2202] Leader for TabletID 9437184 is [1:206:2203] sender: [1:235:2057] recipient: [1:14:2061] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 4562839363315361932 2026-01-07T17:40:26.303307Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-07T17:40:26.327451Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-07T17:40:26.327525Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-07T17:40:26.330137Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-07T17:40:26.344037Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:40:26.346717Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-07T17:44:55.501345Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-07T17:44:55.501463Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-07T17:44:55.721052Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |91.2%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2026-01-07T17:44:54.206302Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:54.206373Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:54.206412Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:54.207421Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:54.207465Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-07T17:44:54.207519Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:54.207539Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2026-01-07T17:44:54.207693Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:54.207717Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 13151740404452589043 2026-01-07T17:44:54.208792Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-07T17:44:54.208914Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-07T17:44:54.208955Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-07T17:44:54.209162Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-07T17:44:54.209192Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-07T17:44:54.209258Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-07T17:44:54.209281Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2026-01-07T17:44:54.209506Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-07T17:44:54.209527Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.153108s 2026-01-07T17:44:54.444800Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:54.445449Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:54.445828Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:54.445860Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-07T17:44:54.498917Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:54.499520Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:54.499919Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:54.499950Z node 5 :BOOTSTRAPPER INFO: 
bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 3 (idx 1) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2026-01-07T17:44:55.437019Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 4 2026-01-07T17:44:55.437090Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-07T17:44:55.437491Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-07T17:44:55.437545Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:55.437717Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-07T17:44:55.437741Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:55.439594Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:55.440164Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:55.441646Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:55.441689Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-07T17:44:55.441787Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:55.441815Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2026-01-07T17:44:56.227660Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 4 2026-01-07T17:44:56.227723Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-07T17:44:56.227820Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-07T17:44:56.227861Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:56.227998Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-07T17:44:56.228017Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:56.228502Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:56.228818Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-07T17:44:56.229185Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-07T17:44:56.229221Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-07T17:44:56.229681Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-07T17:44:56.229710Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2026-01-07T17:44:56.229848Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2026-01-07T17:44:56.229945Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2026-01-07T17:44:56.229977Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 3 (owner) 2026-01-07T17:44:56.230201Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: WAITFOR 2026-01-07T17:44:56.230226Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2026-01-07T17:44:57.036189Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2026-01-07T17:44:57.036244Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2026-01-07T17:44:57.036284Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:57.036525Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:57.037064Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] 2026-01-07T17:44:57.037469Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:285:2098] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-07T17:44:57.037979Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-07T17:44:57.038017Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2026-01-07T17:44:57.038164Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-07T17:44:57.038212Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 1 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2026-01-07T17:44:57.038693Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2026-01-07T17:44:57.038746Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED ... disconnecting nodes 1 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2026-01-07T17:44:57.038950Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-07T17:44:57.038985Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.127990s 2026-01-07T17:44:57.039034Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2026-01-07T17:44:57.039052Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2026-01-07T17:44:57.039110Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-07T17:44:57.039133Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-07T17:44:57.041122Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2026-01-07T17:44:57.041188Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:57.044304Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:439:2098] 2026-01-07T17:44:57.060812Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:57.060868Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-07T17:44:57.137391Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:57.138303Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:439:2098] 2026-01-07T17:44:57.138828Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-07T17:44:57.138861Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling >> TabletState::NormalLifecycle >> TFlatMetrics::MaximumValue1 [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] >> TResourceBrokerInstant::TestMerge >> TabletState::SeqNoSubscribeOutOfOrder |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatMetrics::MaximumValue2 [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> TResourceBrokerInstant::TestMerge [GOOD] >> TabletState::NormalLifecycle [GOOD] >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::Subscribe >> TTabletLabeledCountersAggregator::Version3Aggregation >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling [GOOD] >> ConfiguredTabletBootstrapperTest::NodeListChange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:123:2150] sender: [1:125:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:159:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:164:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:123:2150] sender: [1:165:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:168:2057] recipient: [1:167:2178] Leader for TabletID 9437185 is [1:169:2179] sender: [1:170:2057] recipient: [1:167:2178] Leader for TabletID 9437185 is [1:169:2179] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:202:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:205:2057] recipient: [1:204:2202] Leader for TabletID 9437184 is [1:206:2203] sender: [1:207:2057] recipient: [1:204:2202] Leader for TabletID 9437184 is [1:206:2203] sender: [1:235:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [0:0:0] sender: [2:109:2057] recipient: [2:107:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [2:109:2057] recipient: [2:107:2139] Leader for TabletID 9437184 is 
[2:113:2143] sender: [2:114:2057] recipient: [2:107:2139] Leader for TabletID 9437184 is [2:113:2143] sender: [2:133:2057] recipient: [2:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [2:162:2057] recipient: [2:160:2165] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:162:2057] recipient: [2:160:2165] Leader for TabletID 9437185 is [2:166:2169] sender: [2:167:2057] recipient: [2:160:2165] Leader for TabletID 9437185 is [2:166:2169] sender: [2:192:2057] recipient: [2:14:2061] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] Test command err: 2026-01-07T17:44:57.060678Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-07T17:44:57.060818Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-07T17:44:57.060872Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 1 2026-01-07T17:44:57.064466Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:57.065402Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:57.065463Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-07T17:44:58.049649Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-07T17:44:58.049750Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-07T17:44:58.049832Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 4 2026-01-07T17:44:58.049895Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.049941Z node 6 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 6 2026-01-07T17:44:58.049988Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-07T17:44:58.050219Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:44:58.050256Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-07T17:44:58.236681Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-07T17:44:58.257100Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-07T17:44:58.257201Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-07T17:44:58.257325Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 
72075186232723361 bootstrapper on node 4 2026-01-07T17:44:58.270051Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.312368Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-07T17:44:58.312468Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-07T17:44:58.312608Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-07T17:44:58.312746Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.313401Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:286:2143] 2026-01-07T17:44:58.314454Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-07T17:44:58.314506Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 7501391192840512250 2026-01-07T17:44:58.314920Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:286:2143] 2026-01-07T17:44:58.315801Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 5 state: UNKNOWN 2026-01-07T17:44:58.315859Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-07T17:44:58.316834Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-07T17:44:58.316878Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 18049619590696111298 2026-01-07T17:44:58.318527Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 4 state: OWNER 2026-01-07T17:44:58.318589Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 72075186232723361, type: Dummy, become watch on node 4 (owner) 2026-01-07T17:44:58.457308Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-07T17:44:58.457419Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-07T17:44:58.457460Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-07T17:44:58.458675Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-07T17:44:58.458747Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-07T17:44:58.458785Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-07T17:44:58.458879Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-07T17:44:58.458983Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.459061Z node 5 
:BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.460069Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:340:2166] 2026-01-07T17:44:58.460443Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-07T17:44:58.460483Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-07T17:44:58.747558Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-07T17:44:58.747692Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-07T17:44:58.747744Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-07T17:44:58.749311Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 7 2026-01-07T17:44:58.749376Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-07T17:44:58.749624Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 7 2026-01-07T17:44:58.749718Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:44:58.750498Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [5:382:2102] 2026-01-07T17:44:58.750969Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-07T17:44:58.751015Z node 7 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TReplicaTest::Subscribe [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TReplicaTest::CommitWithStaleGeneration |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:20.368319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:20.440967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:20.447697Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:20.448000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:20.448102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:20.761653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:20.851104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:20.851243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:20.885395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:20.957310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:21.573703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:21.574608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:21.574643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:21.574677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:21.574849Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:21.638855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:22.162867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:24.407659Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:24.420235Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:24.423807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:24.453741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:24.453853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:24.492223Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:24.494061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:24.668117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:24.668237Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:24.669464Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.670088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.670583Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.671443Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.671542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.671805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.671931Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.672029Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.672219Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:24.687516Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:24.864708Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:24.895682Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:24.895840Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:24.937673Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:24.939215Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:24.939450Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:24.939512Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:24.939568Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:24.939623Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:24.939681Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:24.939745Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:24.941524Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:24.942757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:24.945544Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:24.957163Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:24.957231Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:24.957314Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:24.964560Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:24.964666Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:25.011154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2212:2650] 2026-01-07T17:42:25.011408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2212:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:25.011990Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2214:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:25.040790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:25.053183Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:25.053290Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:25.064071Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:25.148707Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:25.222501Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:25.492299Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:25.622320Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:25.622434Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:26.324355Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... echecking } 2026-01-07T17:43:01.563441Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4208:3764], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:43:01.631384Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4207:3763] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:01.903538Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4229:3777]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:01.903847Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:43:01.903948Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4231:3779] 2026-01-07T17:43:01.904033Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4231:3779] 2026-01-07T17:43:01.904452Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4232:3780] 2026-01-07T17:43:01.904563Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4232:3780], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:43:01.904641Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:43:01.904779Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4231:3779], server id = [2:4232:3780], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:01.904900Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:43:01.905006Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4229:3777], StatRequests.size() = 1 2026-01-07T17:43:01.905243Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: 
[72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 ... waiting for TEvKqpScan (done) 2026-01-07T17:44:01.944609Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2805:3022];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-07T17:44:01.954603Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [2:4320:3861]. Got EvDeliveryProblem, TabletId: 72075186224037899, NotDelivered: 0, Starting 2026-01-07T17:44:01.954694Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [2:4320:3861]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037899, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-07T17:44:02.067340Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:4327:3866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 2026-01-07T17:44:02.300091Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:44:02.300209Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR 2026-01-07T17:44:03.155169Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrw2ak17ebmg0dgqarh830", SessionId: ydb://session/3?node_id=2&id=NTFhMTU2YzYtZDZjN2I0YzAtYmRkZWZlMC05NzRiZDQzZg==, Slow query, duration: 61.945630s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:44:03.156644Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4094:3702], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:44:03.156888Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4094:3702], Start read next stream part 2026-01-07T17:44:03.157366Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-07T17:44:03.157518Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:44:03.161766Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4094:3702], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:44:03.161843Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4094:3702], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDlhMTYwNjgtMTlhODQwZTMtN2MyNTVkNmYtY2VmMmU5MmE=, TxId: 2026-01-07T17:44:03.162445Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:44:03.162601Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], Start read next stream part 2026-01-07T17:44:03.287398Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4486:4002]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:44:03.287747Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:44:03.287801Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4486:4002], StatRequests.size() = 1 2026-01-07T17:44:57.234062Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrxytte3246prpbyx4kn82", SessionId: ydb://session/3?node_id=2&id=OWZkNjIzNGQtOWM1NjhlZWItNWNiOGU5OTUtZWJhMDYyMTc=, Slow query, duration: 54.066471s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:44:57.236261Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:44:57.236396Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], Start read next stream part 2026-01-07T17:44:57.236746Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-07T17:44:57.237123Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4577:4087], ActorId: [2:4578:4088], Starting query actor #1 [2:4579:4089] 2026-01-07T17:44:57.237196Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4578:4088], ActorId: [2:4579:4089], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:44:57.240940Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4578:4088], ActorId: [2:4579:4089], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjJkMGQyMjUtYzliMDdjYmYtYzZiODUxOGEtNzc1MWU2Mzc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:44:57.243963Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:44:57.244065Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4084:2463], ActorId: [2:4460:3980], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTBkNTE2NGQtYjdkNTMzMGYtZWJlNjI1ZGMtMWI5ZjhlMTg=, TxId: 2026-01-07T17:44:57.316488Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4596:4103]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:44:57.316659Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:44:57.316689Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4596:4103], StatRequests.size() = 1 2026-01-07T17:44:57.493398Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4578:4088], ActorId: [2:4579:4089], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjJkMGQyMjUtYzliMDdjYmYtYzZiODUxOGEtNzc1MWU2Mzc=, TxId: 2026-01-07T17:44:57.493467Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4578:4088], ActorId: [2:4579:4089], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjJkMGQyMjUtYzliMDdjYmYtYzZiODUxOGEtNzc1MWU2Mzc=, TxId: 2026-01-07T17:44:57.493769Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4577:4087], ActorId: [2:4578:4088], Got response [2:4579:4089] SUCCESS 2026-01-07T17:44:57.494050Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:44:57.508978Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:44:57.509070Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3155:3432] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> TReplicaTest::Delete >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> TReplicaTest::SyncVersion [GOOD] >> TReplicaTest::Delete [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest 
|91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |91.2%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> IncrementalBackup::BasicIndexIncrementalRestore [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> IncrementalBackup::CdcVersionSync |91.2%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.2%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2026-01-07T17:45:00.440294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:00.440387Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:978: [1:7:2054] Reject commit from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-07T17:45:00.440478Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:00.440553Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:00.709415Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-07T17:45:00.709507Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 0 2026-01-07T17:45:00.709593Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-07T17:45:00.709628Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:00.709734Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-07T17:45:00.709775Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:7:2054] Commit generation: owner# 1, generation# 1 2026-01-07T17:45:00.709849Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-07T17:45:00.709883Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:988: [2:7:2054] Reject commit from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-07T17:45:00.709924Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:8:2055] 2026-01-07T17:45:00.709953Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-07T17:45:01.059732Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:01.059818Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:01.059978Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:01.060017Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2026-01-07T17:45:01.067457Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:01.067658Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-07T17:45:01.067736Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.067875Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-07T17:45:01.067957Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.068093Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:01.068129Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2026-01-07T17:45:01.068182Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2026-01-07T17:45:01.068367Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2026-01-07T17:45:01.068412Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.068513Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:12:2059] 2026-01-07T17:45:01.068554Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:12:2059], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.068709Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:13:2060] 2026-01-07T17:45:01.068761Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:13:2060], path# path, domainOwnerId# 0, capabilities# |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2026-01-07T17:45:00.935358Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2026-01-07T17:45:00.478766Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:00.478843Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:00.479006Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:00.479056Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:00.485901Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:00.486064Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-07T17:45:00.486149Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:00.486284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:00.486321Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:00.486368Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:00.763291Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-07T17:45:00.763374Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-07T17:45:00.763455Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.127746Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:01.127847Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:01.128024Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 76 2026-01-07T17:45:01.128089Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:01.128152Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2026-01-07T17:45:01.128226Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2026-01-07T17:45:01.128272Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:01.128346Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:8:2055], cookie# 1 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2026-01-07T17:45:00.462841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:00.462915Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful 
handshake: owner# 800, generation# 1 2026-01-07T17:45:00.463028Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:00.463062Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.472300Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-07T17:45:00.472377Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:00.472479Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-07T17:45:00.472518Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.478718Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2026-01-07T17:45:00.478820Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:45:00.484708Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.484945Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:9:2056], cookie# 0, event size# 103 2026-01-07T17:45:00.485026Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:45:00.485092Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.485242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-07T17:45:00.485333Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-07T17:45:00.542391Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-07T17:45:00.542443Z node 1 
:SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:00.542516Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-07T17:45:00.542558Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.542620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-07T17:45:00.542648Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 900, generation# 1 2026-01-07T17:45:00.542711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-07T17:45:00.542737Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 900, generation# 1 2026-01-07T17:45:00.542825Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2026-01-07T17:45:00.542858Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:45:00.542903Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.542995Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:13:2060], cookie# 0, event size# 103 2026-01-07T17:45:00.543027Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2026-01-07T17:45:00.543064Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [1:11:2058] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2026-01-07T17:45:00.543127Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.543212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:11:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:14:2061] 2026-01-07T17:45:00.543250Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:11:2058] Subscribe: subscriber# [1:14:2061], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 
PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-01-07T17:45:00.543574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-07T17:45:00.543604Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:00.543653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-07T17:45:00.543688Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.543740Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-07T17:45:00.543761Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:00.543798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-07T17:45:00.543838Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.543898Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2026-01-07T17:45:00.543924Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:45:00.543963Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.544046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:17:2064], cookie# 0, event size# 103 2026-01-07T17:45:00.544071Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-07T17:45:00.544104Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-07T17:45:00.544170Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:15:2062] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:18:2065] 2026-01-07T17:45:00.544202Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:15:2062] Subscribe: subscriber# [1:18:2065], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 
1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-07T17:45:00.544507Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-07T17:45:00.544548Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:19:2066] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:00.544591Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:19:2066] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-07T17:45:00.544611Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:19:2066] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:00.544662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Gener ... DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-07T17:45:01.170429Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-07T17:45:01.170481Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-07T17:45:01.170533Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-07T17:45:01.170572Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-07T17:45:01.170623Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-07T17:45:01.170646Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-07T17:45:01.170693Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-07T17:45:01.170716Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-07T17:45:01.170782Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 64 2026-01-07T17:45:01.170811Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-07T17:45:01.170836Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-07T17:45:01.170936Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:401:2448], cookie# 0, event size# 130 2026-01-07T17:45:01.170964Z node 
2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2026-01-07T17:45:01.170992Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:399:2446] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2026-01-07T17:45:01.171070Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:399:2446] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:402:2449] 2026-01-07T17:45:01.171098Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:45:01.171143Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:399:2446] Subscribe: subscriber# [2:402:2449], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-07T17:45:01.173446Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-07T17:45:01.173500Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-07T17:45:01.173548Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-07T17:45:01.173583Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-07T17:45:01.173632Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-07T17:45:01.173663Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-07T17:45:01.173715Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-07T17:45:01.173742Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-07T17:45:01.173817Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2026-01-07T17:45:01.173846Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-07T17:45:01.173869Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-07T17:45:01.173926Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:405:2452], cookie# 0, event size# 64 2026-01-07T17:45:01.173952Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-07T17:45:01.174008Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:403:2450] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:406:2453] 2026-01-07T17:45:01.174036Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# /Root/Tenant/table_inside 2026-01-07T17:45:01.174101Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:403:2450] Subscribe: subscriber# [2:406:2453], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-07T17:45:01.444460Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:01.444512Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-07T17:45:01.444579Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:01.444612Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 800, generation# 1 2026-01-07T17:45:01.444686Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-07T17:45:01.444714Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 900, generation# 1 2026-01-07T17:45:01.444758Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-07T17:45:01.444779Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 900, generation# 1 2026-01-07T17:45:01.444863Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 118 2026-01-07T17:45:01.444890Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2026-01-07T17:45:01.445023Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, 
PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-07T17:45:01.445110Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:9:2056], cookie# 0, event size# 117 2026-01-07T17:45:01.445140Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2026-01-07T17:45:01.445182Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [3:7:2054] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2026-01-07T17:45:01.445218Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2026-01-07T17:45:01.445289Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-07T17:45:01.445372Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-07T17:45:01.445425Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> BasicUsage::CreateTopicWithCustomMetricsLevel >> TReplicaTest::Update >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] Test command err: 2026-01-07T17:45:00.008599Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-07T17:45:00.008763Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 
bootstrapper on node 1 2026-01-07T17:45:00.016604Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:45:00.017751Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:45:00.017827Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-07T17:45:00.156918Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 1 2026-01-07T17:45:00.157999Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-07T17:45:00.158053Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 1 2026-01-07T17:45:00.158152Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-07T17:45:00.828247Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-07T17:45:00.828328Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-07T17:45:00.828397Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 3 2026-01-07T17:45:00.828449Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:45:00.828654Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-07T17:45:00.828684Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-07T17:45:01.033272Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 3 2026-01-07T17:45:01.033400Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-07T17:45:01.033450Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 3 2026-01-07T17:45:01.034942Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-07T17:45:01.035010Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-07T17:45:01.035118Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 4 2026-01-07T17:45:01.035272Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-07T17:45:01.042902Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: OK, leader: [3:221:2142] 2026-01-07T17:45:01.043449Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723360, type: Dummy, connect: ERROR 2026-01-07T17:45:01.043502Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> 
CellsFromTupleTest::CellsFromTupleSuccessPg >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> TReplicaTest::UpdateWithoutHandshake >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> IncrementalBackup::MultipleIndexesIncrementalRestore [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TReplicaTest::Unsubscribe |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TReplicaTest::HandshakeWithStaleGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2026-01-07T17:45:02.774737Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:02.774814Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:02.775030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:02.775068Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:02.781005Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status 
StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:02.781178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-07T17:45:02.781267Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:02.781402Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-07T17:45:02.781456Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-07T17:45:02.781512Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-07T17:45:02.781563Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:02.781663Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-07T17:45:02.781705Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:03.061925Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:45:03.061998Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:03.062116Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:03.062156Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:03.062221Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:03.062302Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2026-01-07T17:45:02.932602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:02.932688Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:806: [1:7:2054] Reject update from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-07T17:45:02.932778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 
2026-01-07T17:45:02.932825Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-07T17:45:02.932985Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:02.933122Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-07T17:45:02.933203Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-07T17:45:02.933275Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-07T17:45:02.933311Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:02.933559Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:02.933640Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-07T17:45:02.933682Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:03.210929Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:45:03.211047Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:03.211181Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:03.211220Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:814: [2:7:2054] Reject update from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-07T17:45:03.211304Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-07T17:45:03.211339Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-07T17:45:03.211413Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:03.211518Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-07T17:45:03.211565Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-07T17:45:03.211623Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-07T17:45:03.211656Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:03.211714Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, 
capabilities# 2026-01-07T17:45:03.211796Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2026-01-07T17:45:03.211835Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TReplicaTest::Merge >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TBSV::CleanupDroppedVolumesOnRestart >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TBSV::ShardsNotLeftInShardsToDelete >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::Commit >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> BsControllerConfig::ExtendByCreatingSeparateBox >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] >> BsControllerConfig::DeleteStoragePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] Test command err: 2026-01-07T17:44:57.250131Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [2:8:2055] 2026-01-07T17:44:57.250388Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:9:2056] worker 0 2026-01-07T17:44:57.250426Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:10:2057] worker 1 2026-01-07T17:44:57.250445Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:11:2058] worker 2 2026-01-07T17:44:57.250461Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:12:2059] worker 3 2026-01-07T17:44:57.250483Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:13:2060] worker 4 2026-01-07T17:44:57.250504Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:14:2061] worker 5 2026-01-07T17:44:57.250531Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:15:2062] worker 6 2026-01-07T17:44:57.250559Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:16:2063] worker 7 2026-01-07T17:44:57.250605Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:17:2064] worker 8 2026-01-07T17:44:57.250627Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:18:2065] worker 9 Sending message to [2:10:2057] from [2:8:2055] id 1 Sending message to [2:11:2058] from [2:8:2055] id 2 Sending message to [2:12:2059] from [2:8:2055] id 3 Sending message to [2:13:2060] from [2:8:2055] id 4 Sending message to [2:14:2061] from [2:8:2055] id 5 Sending message to [2:15:2062] from [2:8:2055] id 6 Sending message to [2:16:2063] from [2:8:2055] id 7 Sending message to [2:17:2064] from [2:8:2055] id 8 Sending message to [2:18:2065] from [2:8:2055] id 9 Sending message to [2:9:2056] from [2:8:2055] id 10 2026-01-07T17:44:57.940190Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [2:10:2057] 2026-01-07T17:44:57.940269Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [2:11:2058] 2026-01-07T17:44:57.940380Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: 
aggregator actor request to node 3 [2:12:2059] 2026-01-07T17:44:57.940426Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [2:13:2060] 2026-01-07T17:44:57.940478Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [2:14:2061] 2026-01-07T17:44:57.940532Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [2:15:2062] 2026-01-07T17:44:57.940603Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [2:16:2063] 2026-01-07T17:44:57.940651Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [2:17:2064] 2026-01-07T17:44:57.940714Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [2:18:2065] 2026-01-07T17:44:57.941071Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [2:9:2056] 2026-01-07T17:44:57.941125Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:13:2060] 2026-01-07T17:44:57.942569Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:13:2060] 2026-01-07T17:44:57.972431Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:13:2060] Initiator [2:8:2055] 2026-01-07T17:44:57.995564Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:14:2061] 2026-01-07T17:44:57.997008Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:14:2061] 2026-01-07T17:44:58.026752Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:14:2061] Initiator [2:8:2055] 2026-01-07T17:44:58.049852Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:15:2062] 2026-01-07T17:44:58.051447Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:15:2062] 2026-01-07T17:44:58.080175Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:15:2062] Initiator [2:8:2055] 2026-01-07T17:44:58.103248Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:16:2063] 2026-01-07T17:44:58.104709Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:16:2063] 2026-01-07T17:44:58.135320Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:16:2063] Initiator [2:8:2055] 2026-01-07T17:44:58.156791Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:17:2064] 2026-01-07T17:44:58.158034Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:17:2064] 2026-01-07T17:44:58.182802Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:17:2064] Initiator [2:8:2055] 2026-01-07T17:44:58.201877Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:18:2065] 2026-01-07T17:44:58.203037Z node 2 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:18:2065] 2026-01-07T17:44:58.229591Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:18:2065] Initiator [2:8:2055] 2026-01-07T17:44:58.253643Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:8:2055] 2026-01-07T17:44:58.253782Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:8:2055] 2026-01-07T17:44:58.258950Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [2:9:2056] 2026-01-07T17:44:58.260488Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [2:9:2056] 2026-01-07T17:44:58.286639Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:9:2056] Initiator [2:8:2055] 2026-01-07T17:44:58.307013Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:10:2057] 2026-01-07T17:44:58.308301Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:10:2057] 2026-01-07T17:44:58.328484Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:10:2057] Initiator [2:8:2055] 2026-01-07T17:44:58.345530Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:11:2058] 2026-01-07T17:44:58.346790Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:11:2058] 2026-01-07T17:44:58.371041Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:11:2058] Initiator [2:8:2055] 2026-01-07T17:44:58.388846Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:12:2059] 2026-01-07T17:44:58.390032Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:12:2059] 2026-01-07T17:44:58.414557Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:12:2059] Initiator [2:8:2055] 2026-01-07T17:44:58.433607Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:8:2055] 2026-01-07T17:44:58.433751Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:8:2055] 2026-01-07T17:44:58.438001Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:8:2055] 2026-01-07T17:44:58.438145Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:8:2055] 2026-01-07T17:44:58.443070Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:8:2055] 2026-01-07T17:44:58.443207Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:8:2055] 2026-01-07T17:44:58.449066Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:8:2055] 2026-01-07T17:44:58.449224Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:8:2055] 
2026-01-07T17:44:58.454434Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:8:2055] 2026-01-07T17:44:58.454570Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:8:2055] 2026-01-07T17:44:58.459670Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [2:8:2055] 2026-01-07T17:44:58.459823Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [2:8:2055] 2026-01-07T17:44:58.466821Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:8:2055] 2026-01-07T17:44:58.466971Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:8:2055] 2026-01-07T17:44:58.472141Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:8:2055] 2026-01-07T17:44:58.472287Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:8:2055] 2026-01-07T17:44:58.477541Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:8:2055] 2026-01-07T17:44:58.477680Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:8:2055] 2026-01-07T17:44:58.482574Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:8:2055] Initiator [2:7:2054] TEST 2 10 duration 1.346769s 2026-01-07T17:44:58.733815Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [3:8:2055] 2026-01-07T17:44:58.734350Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:9:2056] worker 0 2026-01-07T17:44:58.734413Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:10:2057] worker 1 2026-01-07T17:44:58.734446Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_ag ... 
or got response node 8 [3:8:2055] 2026-01-07T17:44:59.900292Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [3:8:2055] 2026-01-07T17:44:59.903770Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [3:8:2055] 2026-01-07T17:44:59.903867Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [3:8:2055] 2026-01-07T17:44:59.908678Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [3:8:2055] 2026-01-07T17:44:59.908816Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [3:8:2055] 2026-01-07T17:44:59.913670Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [3:8:2055] Initiator [3:7:2054] TEST 2 20 duration 1.305819s 2026-01-07T17:45:00.209434Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [4:8:2055] 2026-01-07T17:45:00.209534Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [4:8:2055] self [4:9:2056] worker 0 Sending message to [4:9:2056] from [4:8:2055] id 1 Sending message to [4:9:2056] from [4:8:2055] id 2 Sending message to [4:9:2056] from [4:8:2055] id 3 Sending message to [4:9:2056] from [4:8:2055] id 4 Sending message to [4:9:2056] from [4:8:2055] id 5 Sending message to [4:9:2056] from [4:8:2055] id 6 Sending message to [4:9:2056] from [4:8:2055] id 7 Sending message to [4:9:2056] from [4:8:2055] id 8 Sending message to [4:9:2056] from [4:8:2055] id 9 Sending message to [4:9:2056] from [4:8:2055] id 10 2026-01-07T17:45:00.834917Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [4:9:2056] 2026-01-07T17:45:00.834962Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [4:9:2056] 2026-01-07T17:45:00.834982Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [4:9:2056] 2026-01-07T17:45:00.835008Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [4:9:2056] 2026-01-07T17:45:00.835058Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [4:9:2056] 2026-01-07T17:45:00.835086Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [4:9:2056] 2026-01-07T17:45:00.835112Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [4:9:2056] 2026-01-07T17:45:00.835138Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [4:9:2056] 2026-01-07T17:45:00.835166Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [4:9:2056] 2026-01-07T17:45:00.835194Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [4:9:2056] 2026-01-07T17:45:00.835438Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [4:9:2056] 2026-01-07T17:45:00.836633Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [4:9:2056] 2026-01-07T17:45:00.858626Z node 4 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [4:9:2056] 2026-01-07T17:45:00.859598Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [4:9:2056] 2026-01-07T17:45:00.884334Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [4:9:2056] 2026-01-07T17:45:00.885591Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [4:9:2056] 2026-01-07T17:45:00.916566Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [4:9:2056] 2026-01-07T17:45:00.918043Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [4:9:2056] 2026-01-07T17:45:00.950510Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [4:9:2056] 2026-01-07T17:45:00.952011Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [4:9:2056] 2026-01-07T17:45:00.998427Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [4:9:2056] 2026-01-07T17:45:01.000285Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [4:9:2056] 2026-01-07T17:45:01.033889Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [4:9:2056] 2026-01-07T17:45:01.035185Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [4:9:2056] 2026-01-07T17:45:01.067524Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [4:9:2056] 2026-01-07T17:45:01.069391Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [4:9:2056] 2026-01-07T17:45:01.116870Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [4:9:2056] 2026-01-07T17:45:01.118582Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [4:9:2056] 2026-01-07T17:45:01.151235Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [4:9:2056] 2026-01-07T17:45:01.152808Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [4:9:2056] 2026-01-07T17:45:01.200331Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:9:2056] Initiator [4:8:2055] 2026-01-07T17:45:01.428645Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [4:8:2055] 2026-01-07T17:45:01.429279Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [4:8:2055] 2026-01-07T17:45:01.470697Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:8:2055] Initiator [4:7:2054] TEST 2 1 duration 1.440625s 2026-01-07T17:45:01.696920Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [5:7:2054] self [5:8:2055] worker 0 Sending message to [5:8:2055] from [5:8:2055] id 1 Sending message to [5:8:2055] from [5:8:2055] id 2 Sending message to [5:8:2055] from [5:8:2055] id 3 Sending message 
to [5:8:2055] from [5:8:2055] id 4 Sending message to [5:8:2055] from [5:8:2055] id 5 Sending message to [5:8:2055] from [5:8:2055] id 6 Sending message to [5:8:2055] from [5:8:2055] id 7 Sending message to [5:8:2055] from [5:8:2055] id 8 Sending message to [5:8:2055] from [5:8:2055] id 9 Sending message to [5:8:2055] from [5:8:2055] id 10 2026-01-07T17:45:02.521567Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [5:8:2055] 2026-01-07T17:45:02.521631Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [5:8:2055] 2026-01-07T17:45:02.521648Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [5:8:2055] 2026-01-07T17:45:02.521665Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [5:8:2055] 2026-01-07T17:45:02.521683Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [5:8:2055] 2026-01-07T17:45:02.521753Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [5:8:2055] 2026-01-07T17:45:02.521782Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [5:8:2055] 2026-01-07T17:45:02.521808Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [5:8:2055] 2026-01-07T17:45:02.521835Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [5:8:2055] 2026-01-07T17:45:02.521861Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [5:8:2055] 2026-01-07T17:45:02.522065Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [5:8:2055] 2026-01-07T17:45:02.523400Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [5:8:2055] 2026-01-07T17:45:02.549541Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [5:8:2055] 2026-01-07T17:45:02.550865Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [5:8:2055] 2026-01-07T17:45:02.587672Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [5:8:2055] 2026-01-07T17:45:02.589784Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [5:8:2055] 2026-01-07T17:45:02.631904Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [5:8:2055] 2026-01-07T17:45:02.634162Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [5:8:2055] 2026-01-07T17:45:02.671901Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [5:8:2055] 2026-01-07T17:45:02.673604Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [5:8:2055] 2026-01-07T17:45:02.723044Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [5:8:2055] 2026-01-07T17:45:02.725069Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [5:8:2055] 
2026-01-07T17:45:02.762058Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [5:8:2055] 2026-01-07T17:45:02.764172Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [5:8:2055] 2026-01-07T17:45:02.805708Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [5:8:2055] 2026-01-07T17:45:02.807496Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [5:8:2055] 2026-01-07T17:45:02.845220Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [5:8:2055] 2026-01-07T17:45:02.847007Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [5:8:2055] 2026-01-07T17:45:02.886453Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [5:8:2055] 2026-01-07T17:45:02.888397Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [5:8:2055] 2026-01-07T17:45:02.955024Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [5:8:2055] Initiator [5:7:2054] TEST 2 1 duration 1.724172s |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation |91.4%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2026-01-07T17:45:04.150838Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:04.150925Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.151049Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-07T17:45:04.151121Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-07T17:45:04.151231Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.151320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-07T17:45:04.151364Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.151535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.151571Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.157982Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status 
StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.168584Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:9:2056] 2026-01-07T17:45:04.168683Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:9:2056], path# path 2026-01-07T17:45:04.168808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:04.168849Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:04.168889Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.439030Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> BsControllerConfig::AddDriveSerial >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |91.4%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2026-01-07T17:45:04.184876Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-07T17:45:04.184973Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-07T17:45:04.185086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:04.185144Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:772: [1:7:2054] Reject handshake from stale populator: sender# [1:8:2055], owner# 1, generation# 1, pending generation# 2 2026-01-07T17:45:04.482454Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:45:04.482524Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.482656Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-07T17:45:04.482709Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.482838Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.483018Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.483063Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.490849Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.491085Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:04.491128Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:04.491178Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.491296Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.491338Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.491394Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.491469Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.491520Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-07T17:45:04.491632Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.491751Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-07T17:45:04.491969Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.780725Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:04.780799Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.780970Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.781023Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] 
Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.781090Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.781181Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.781243Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-07T17:45:04.781292Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.781353Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.781440Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:04.781476Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2026-01-07T17:45:04.781519Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2026-01-07T17:45:04.781606Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.781645Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.781699Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.781783Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.781828Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-07T17:45:04.781862Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] >> BsControllerConfig::PDiskCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2026-01-07T17:45:04.253997Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-07T17:45:04.254077Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-07T17:45:04.254182Z 
node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.254288Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-07T17:45:04.254335Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.254392Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.254533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:04.254572Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.254699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.254737Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.261743Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.262044Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:04.262093Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:04.262149Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.550244Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:45:04.550312Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.550430Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-07T17:45:04.550474Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.550543Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.550689Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.550784Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 
2026-01-07T17:45:04.550840Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.550970Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:04.551019Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:04.551055Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.551132Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:9:2056] 2026-01-07T17:45:04.551186Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.551268Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.551315Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:04.551362Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:04.551465Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.551500Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-07T17:45:04.551556Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:04.551656Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-07T17:45:04.551705Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-07T17:45:04.834709Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:9:2056] 2026-01-07T17:45:04.834785Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-07T17:45:04.834852Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 1, capabilities# 2026-01-07T17:45:04.834963Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:04.834999Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.835068Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:04.835099Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 1, generation# 1 2026-01-07T17:45:04.835198Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1006: [3:7:2054] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |91.4%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:04.725168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:04.725305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.725364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:04.725414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:04.725455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:04.725487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:04.725569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.725655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:04.726553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-07T17:45:04.726887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:04.815866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:04.815941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:04.834137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:04.834578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:04.834797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:04.841904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:04.842253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:04.842983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:04.843286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:04.855479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.855772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:04.857066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:04.857144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.857274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:04.857335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:04.857461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:04.857668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.864989Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:45:05.002474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:05.002709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.002905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:45:05.002956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:45:05.003142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:45:05.003201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:05.005735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:05.005929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:45:05.006114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.006174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:45:05.006209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:45:05.006262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:45:05.010842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.010934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:45:05.010976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:45:05.013822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.013880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.013929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:05.014009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:45:05.017914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:45:05.020497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:45:05.020721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:45:05.021834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:05.021973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:05.022037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:05.022337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:45:05.022400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:05.022576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:45:05.022671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:05.025089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:05.025148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
d: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:45:05.135244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:05.135280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:05.135321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:05.135355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:05.135428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:45:05.135484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:45:05.135513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:45:05.135554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:05.135585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:45:05.135617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:45:05.135764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:45:05.135800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:45:05.135827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:45:05.135853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-07T17:45:05.137759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:45:05.137804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:45:05.137920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:45:05.137947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:45:05.138401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:45:05.138463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:45:05.138488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 
72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:45:05.138593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:05.138617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:05.138730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:45:05.138848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:05.138874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:45:05.138899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:45:05.139236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:05.139292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:05.139317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:45:05.139345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-07T17:45:05.139398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:45:05.139793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:45:05.139830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:45:05.139880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:45:05.140173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:05.140232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:05.140252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:45:05.140272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:45:05.140300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:45:05.140357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:45:05.140762Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-07T17:45:05.140832Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-07T17:45:05.140898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:05.142308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:45:05.144491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:45:05.146027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:45:05.146302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:45:05.147499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:45:05.147578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:45:05.147838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:45:05.147877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:45:05.148187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:45:05.148255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:45:05.148284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2375] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-07T17:45:05.148558Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-07T17:45:05.148630Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> BsControllerConfig::SelectAllGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:04.607724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:04.607819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.607887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:04.607930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:04.607965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:04.607995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:04.608059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.608121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:04.608994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:04.609285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:04.708386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:04.708724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:04.724851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:04.725231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:04.725424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:04.733041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:04.733372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:04.734067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:04.734313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:04.736985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.737189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:04.738252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:04.738307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.738416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:04.738464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:04.738554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:04.738740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.745902Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:45:04.878768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:04.878992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.879196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:45:04.879239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:45:04.879468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:45:04.879530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:04.882464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:04.882683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:45:04.882892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.882981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:45:04.883023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:45:04.883060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:45:04.885301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.885371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:45:04.885419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:45:04.887410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.887463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.887515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:04.887583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:45:04.891509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:45:04.894072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:45:04.894272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:45:04.895398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:04.895517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:04.895575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:04.895832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:45:04.895886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:45:04.896045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:45:04.896110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:04.898268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:04.898317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
[1:15:2062] 2026-01-07T17:45:05.164867Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:05.165114Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 218us result status StatusPathDoesNotExist 2026-01-07T17:45:05.165252Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:45:05.166360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:407:2382] sender: [1:476:2058] recipient: [1:107:2140] Leader for TabletID 72057594046678944 is [1:407:2382] sender: [1:479:2058] recipient: [1:478:2436] Leader for TabletID 72057594046678944 is [1:480:2437] sender: [1:481:2058] recipient: [1:478:2436] 2026-01-07T17:45:05.209815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:05.209917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:05.209970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:05.210013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:05.210065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:05.210093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:05.210148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:05.210217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:45:05.211016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:05.211280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:05.226940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:05.228338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:05.228520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:05.228656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:05.228699Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:05.229006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:05.229713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1418: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:05.229819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.229884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.230269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.230359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-07T17:45:05.230579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.230686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.230812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.230921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.231020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.231197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.231497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.231649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit 
for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.232832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.233852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:05.239050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:05.241329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:05.241394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:05.242033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:05.242116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:05.242172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:05.242560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:480:2437] sender: [1:542:2058] recipient: [1:15:2062] 2026-01-07T17:45:05.275833Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:05.276049Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 250us result status StatusPathDoesNotExist 2026-01-07T17:45:05.276160Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2026-01-07T17:45:04.722326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:04.722402Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.722509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-07T17:45:04.722562Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 1, generation# 1 2026-01-07T17:45:04.722610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-07T17:45:04.722640Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-07T17:45:04.993227Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-07T17:45:04.993292Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-07T17:45:04.993427Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-07T17:45:04.993571Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-07T17:45:04.993611Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:04.993777Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:04.993814Z node 2 
:SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:05.000265Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:05.000472Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:9:2056] 2026-01-07T17:45:05.000574Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-07T17:45:05.000609Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-07T17:45:05.000643Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:05.000726Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:9:2056] 2026-01-07T17:45:05.278026Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-07T17:45:05.278100Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-07T17:45:05.278244Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:05.278309Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:05.278385Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:05.278505Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-07T17:45:05.278606Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-07T17:45:05.278756Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:05.278798Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:45:05.278853Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized 
size 30} 2026-01-07T17:45:05.279045Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-07T17:45:05.279086Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-07T17:45:05.279134Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-07T17:45:05.279216Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-07T17:45:05.279311Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-07T17:45:05.279374Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-07T17:45:05.279489Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:9:2056] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 |91.4%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |91.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] |91.4%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.5%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> KqpQueryPerf::Update-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:04.497971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:04.498087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.498140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:04.498181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:04.498218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:04.498248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:04.498301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:04.498366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:04.499436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:04.499772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:04.584496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:04.584561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:04.598436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:04.598808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:04.599008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:04.606144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:04.606503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:04.607250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:04.607534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:04.610738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.610953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:04.612127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:04.612188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:04.612311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:04.612361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:04.612473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:04.612672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:04.772561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:45:04.773638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.773768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.773857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.773940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:04.774777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 FAKE_COORDINATOR: Add transaction: 129 at step: 5000064 FAKE_COORDINATOR: advance: minStep5000064 State->FrontStep: 5000063 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000064 2026-01-07T17:45:06.681418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000064, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:06.681566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000064 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:06.681637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000064, at schemeshard: 72057594046678944 2026-01-07T17:45:06.681799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-07T17:45:06.681948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:45:06.681989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:06.682032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:45:06.682065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:06.682130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:45:06.682211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-07T17:45:06.682270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-07T17:45:06.682325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:06.682368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-07T17:45:06.682421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-07T17:45:06.682557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-07T17:45:06.682660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-07T17:45:06.682701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 57 2026-01-07T17:45:06.682736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, 
[OwnerId: 72057594046678944, LocalPathId: 49], 18446744073709551615 2026-01-07T17:45:06.683506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-07T17:45:06.683571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-07T17:45:06.685485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:45:06.685575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:45:06.685607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:45:06.685806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:06.685844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:06.686031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 49] 2026-01-07T17:45:06.686175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:06.686209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-07T17:45:06.686248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 129, path id: 49 FAKE_COORDINATOR: Erasing txId 129 2026-01-07T17:45:06.686752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:06.686850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:06.686883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:06.686925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 49], version: 18446744073709551615 2026-01-07T17:45:06.686982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-07T17:45:06.687568Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:45:06.687620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 49], at schemeshard: 72057594046678944 2026-01-07T17:45:06.687687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:45:06.688149Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 2026-01-07T17:45:06.688277Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 2026-01-07T17:45:06.688481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:06.688553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:06.688586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:06.688616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 57 2026-01-07T17:45:06.688646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:45:06.688741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-07T17:45:06.689322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2026-01-07T17:45:06.689849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2026-01-07T17:45:06.691287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:45:06.693365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:45:06.693515Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:45:06.693971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-07T17:45:06.695034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2026-01-07T17:45:06.695773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2026-01-07T17:45:06.695823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2026-01-07T17:45:06.696498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-07T17:45:06.696623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-07T17:45:06.696667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:2080:3948] TestWaitNotification: OK eventTxId 129 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> KqpQueryPerf::Replace+QueryService+UseSink >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> BsControllerConfig::SelectAllGroups [GOOD] >> BsControllerConfig::SoleCommandErrorWhenCombined |91.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] Test command err: 2026-01-07T17:45:06.404333Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:06.405451Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:06.405825Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:06.407449Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:06.407743Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:06.407888Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.407913Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.408079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:06.415866Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:06.415971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:06.416072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:06.416147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.416271Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.416344Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:06.609718Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.132954s 2026-01-07T17:45:06.609870Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.133133s Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:29:2057] recipient: [11:24:2068] 2026-01-07T17:45:08.463653Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 
2026-01-07T17:45:08.464602Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:08.465220Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:08.466399Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:08.466911Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:08.467027Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.467054Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.467232Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:08.489955Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:08.490118Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:08.490242Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:08.490427Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.490537Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.490628Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:49:2057] recipient: [11:11:2058] 2026-01-07T17:45:08.502018Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:08.502173Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.524144Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.524293Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:08.524443Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:08.524527Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.524664Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.524710Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 
2026-01-07T17:45:08.524756Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:08.524802Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.538827Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.538955Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.549725Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.549861Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:08.551186Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:08.551236Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:08.551479Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:08.551535Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:08.552166Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:29:2057] recipient: [12:23:2068] 2026-01-07T17:45:08.766310Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:08.769336Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:08.769632Z node 12 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:08.770205Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:08.771480Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:08.771711Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.771742Z node 12 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.771920Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:08.782752Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:08.782900Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 
2026-01-07T17:45:08.783004Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:08.783134Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.783251Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.783316Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:49:2057] recipient: [12:11:2058] 2026-01-07T17:45:08.794773Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:08.794946Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.818217Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.818354Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:08.818442Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:08.818518Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.818616Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.818729Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:08.818793Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:08.818858Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.833540Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.833684Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.845451Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.845583Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:08.846936Z node 12 :BS_CONTROLLER DEBUG: 
{BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:08.846995Z node 12 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:08.847220Z node 12 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:08.847270Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:08.853426Z node 12 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::CommandRollbackWhenAlone >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> KqpQueryPerf::Upsert-QueryService-UseSink |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:202:2077] 2026-01-07T17:45:06.324666Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:06.325965Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:06.326344Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:06.328205Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:06.328625Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:06.328845Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.328879Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.329245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:06.339140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:06.339278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:06.339501Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:06.339668Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.339783Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.339872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-07T17:45:06.351336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:06.351535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.376963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.377113Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.377209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.377301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.377429Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.377487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.377556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.377611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.388395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.388550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.400109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.400266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:06.401773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:06.401852Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:06.402083Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:06.402146Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue 
processed 2026-01-07T17:45:06.416123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:200:2077] 2026-01-07T17:45:08.443891Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:08.444846Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:08.445108Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:08.447042Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:08.447472Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:08.447710Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.447738Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:08.447951Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:08.467901Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:08.468063Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:08.468184Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:08.468362Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.468484Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:08.468595Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-07T17:45:08.481571Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:08.481747Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.513466Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:08.513591Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:08.513669Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2026-01-07T17:45:08.513757Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.513869Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:08.513927Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:08.513985Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:08.514056Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.524847Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:08.524990Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.536036Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:08.536177Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:08.537675Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:08.537734Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:08.537945Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:08.537991Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:08.538593Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:29:2057] recipient: [21:24:2068] 2026-01-07T17:45:10.126817Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:10.127807Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:10.128038Z node 21 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:10.129206Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:10.129626Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:10.129826Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:10.129875Z node 21 :BS_CONTROLLER DEBUG: 
{BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:10.130113Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:10.140186Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:10.140333Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:10.140447Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:10.140579Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:10.140686Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:10.140787Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:49:2057] recipient: [21:11:2058] 2026-01-07T17:45:10.152320Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:10.152476Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:10.173679Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:10.173854Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:10.173953Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:10.174048Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:10.174171Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:10.174223Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:10.174257Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:10.174308Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:10.185028Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:10.185154Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 
2026-01-07T17:45:10.195873Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:10.195991Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:10.197415Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:10.197464Z node 21 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:10.197689Z node 21 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:10.197740Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:10.199870Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:23:2068] Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:29:2057] recipient: [22:23:2068] 2026-01-07T17:45:10.430028Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:10.431064Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:10.431308Z node 22 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:10.432494Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:10.433419Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:10.433554Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:10.433584Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:10.433765Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:10.443473Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:10.443605Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:10.443725Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:10.443860Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:10.443958Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:10.444054Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:49:2057] recipient: [22:11:2058] 2026-01-07T17:45:10.457902Z node 
22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:10.458104Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:10.479433Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:10.479585Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:10.479658Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:10.479726Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:10.479848Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:10.479904Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:10.479939Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:10.479993Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:10.492873Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:10.493040Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:10.503853Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:10.503978Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:10.505371Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:10.505436Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:10.505671Z node 22 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:10.505711Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:10.506367Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> KqpQueryPerf::Delete-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> KqpQueryPerf::RangeRead-QueryService >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:202:2077] 2026-01-07T17:45:05.670163Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:05.672427Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:05.672721Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:05.678135Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:05.678460Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:05.678596Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.678613Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.678767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:05.685814Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:05.685904Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:05.686016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:05.686126Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.686222Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.686277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-07T17:45:05.697481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:05.697609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.725525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.725662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.725781Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.725873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.725983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.726038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.726105Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.726171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.738206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.738402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.749505Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.749663Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:05.751111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:05.751177Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:05.751406Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:05.751459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:05.769336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-07T17:45:05.793909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-07T17:45:05.794608Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:200:2077] 2026-01-07T17:45:07.515429Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:07.516396Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:07.516647Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:07.517918Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:07.518305Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:07.518550Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:07.518576Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:07.518765Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:07.527210Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:07.527359Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:07.527498Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:07.527620Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:07.527771Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:07.527841Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-07T17:45:07.539294Z node 
11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:07.539498Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:07.563994Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:07.564115Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:07.564171Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:07.564267Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:07.564412Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:07.564476Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:07.564513Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:07.564598Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:07.575423Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:07.575571Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:07.586379Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:07.586511Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:07.587740Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:07.587789Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:07.587980Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:07.588038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:07.588734Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-07T17:45:07.590017Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-07T17:45:09.734331Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-07T17:45:09.736173Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-07T17:45:09.736884Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-07T17:45:09.737706Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-07T17:45:09.738521Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-07T17:45:09.739407Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2026-01-07T17:45:09.740109Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-07T17:45:09.740849Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-07T17:45:09.741535Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-07T17:45:09.742290Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-07T17:45:09.743044Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-07T17:45:09.743743Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-07T17:45:09.744439Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-07T17:45:09.745277Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:205:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:205:2077] Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:227:2066] recipient: [31:205:2077] 2026-01-07T17:45:11.527118Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:11.528181Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:11.528485Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:11.531650Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2026-01-07T17:45:11.532153Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:11.532369Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:11.532399Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:11.532617Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:11.543002Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:11.543143Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:11.543253Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:11.543426Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:11.543603Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:11.543751Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:247:2066] recipient: [31:20:2067] 2026-01-07T17:45:11.555293Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:11.555472Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:11.581207Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:11.581371Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:11.581471Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:11.581565Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:11.581704Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:11.581783Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:11.581820Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:11.581890Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:11.592695Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:11.592962Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:11.604836Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:11.604990Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:11.606392Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:11.606450Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:11.606665Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:11.606725Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:11.609863Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2026-01-07T17:45:11.610986Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2026-01-07T17:45:11.611732Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2026-01-07T17:45:11.612488Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2026-01-07T17:45:11.613439Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2026-01-07T17:45:11.614102Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-07T17:45:11.614712Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-07T17:45:11.615432Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-07T17:45:11.616146Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-07T17:45:11.617042Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-07T17:45:11.620424Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-07T17:45:11.621302Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2026-01-07T17:45:11.622026Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-07T17:45:11.622692Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-07T17:45:11.623356Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-07T17:45:11.624116Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-07T17:45:11.624834Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-07T17:45:11.625593Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-07T17:45:11.626317Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-07T17:45:11.627816Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> BsControllerConfig::ReassignGroupDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29592, MsgBus: 13389 2026-01-07T17:45:06.860818Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677142273293973:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:06.860862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.088581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.123799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.123915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.180610Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677142273293930:2081] 1767807906854502 != 1767807906854505 2026-01-07T17:45:07.188611Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.210392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.285952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.288085Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:07.288098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.288106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.288230Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.700309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:07.775886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:07.887886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:07.954701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.153415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.233090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.147051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159453165005:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.147190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.147636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159453165015:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.147685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.441417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.478052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.511491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.580475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.614993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.650235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.694884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.737662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.817371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159453165889:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.817493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.817744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159453165894:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.817794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159453165895:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.817897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.822539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:10.834761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677159453165898:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:10.918143Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677159453165951:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:11.864229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677142273293973:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:11.864297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28771, MsgBus: 2406 2026-01-07T17:45:06.910679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677145104431673:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:06.910765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.165246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.179401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.179640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.227546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.281097Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.344697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:07.344718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.344749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.344832Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.377011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.897618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:07.921069Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:07.974484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.159813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.342979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.423670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.313921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677162284302695:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.314040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.314520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677162284302705:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.314571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.674623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.723502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.765143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.801937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.838575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.879619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.921586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.975421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.042736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677166579270872:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.042820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.043106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677166579270877:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.043154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677166579270878:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.043190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.046837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:11.057979Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677166579270881:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:11.129949Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677166579270932:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:11.910969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677145104431673:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:11.911038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:202:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:202:2077] 2026-01-07T17:45:06.007991Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:06.009327Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:06.009720Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:06.011793Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:06.012228Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:06.012446Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.012478Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.012694Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:06.023062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:06.023198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:06.023610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:06.023846Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.023946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.024043Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:248:2066] recipient: [1:20:2067] 
2026-01-07T17:45:06.035796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:06.035949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.064195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.064344Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.064435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.064518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.064617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.064666Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.064703Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.064748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.077461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.077580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.088362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.088513Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:06.089840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:06.089913Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:06.090111Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:06.090169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:06.104209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { 
Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2026-01-07T17:45:06.104808Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-07T17:45:06.104852Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-07T17:45:06.104867Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-07T17:45:06.104894Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-07T17:45:06.104918Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-07T17:45:06.104962Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-07T17:45:06.104994Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-07T17:45:06.105009Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-07T17:45:06.105025Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-07T17:45:06.105038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-07T17:45:06.105052Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-07T17:45:06.105068Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-07T17:45:06.105092Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-07T17:45:06.105112Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-07T17:45:06.105139Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-07T17:45:06.105153Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-07T17:45:06.105167Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-07T17:45:06.105186Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-07T17:45:06.105199Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-07T17:45:06.105211Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-07T17:45:06.105224Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk 
PDiskId# 7:1002 Path# /dev/disk3 2026-01-07T17:45:06.105237Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-07T17:45:06.105260Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-07T17:45:06.105277Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-07T17:45:06.105298Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-07T17:45:06.105323Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-07T17:45:06.105344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-07T17:45:06.105357Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-07T17:45:06.105370Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-07T17:45:06.105382Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:200:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:200:2077] 2026-01-07T17:45:08.090459Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:08.091369Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:08.091651Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:08.092904Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:08.093348Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:08.093617Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNo ... 
Port: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:45:10.143997Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2026-01-07T17:45:10.144048Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2026-01-07T17:45:10.144073Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2026-01-07T17:45:10.144096Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2026-01-07T17:45:10.144123Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 25:1000 Path# /dev/disk 2026-01-07T17:45:10.144148Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 26:1000 Path# /dev/disk 2026-01-07T17:45:10.144179Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 27:1000 Path# /dev/disk 2026-01-07T17:45:10.144222Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 28:1000 Path# /dev/disk 2026-01-07T17:45:10.144250Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 29:1000 Path# /dev/disk 2026-01-07T17:45:10.144274Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 30:1000 Path# /dev/disk 2026-01-07T17:45:10.144317Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 31:1000 Path# /dev/disk 2026-01-07T17:45:10.144343Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 32:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-07T17:45:10.170948Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:293:2068] recipient: [33:267:2079] 2026-01-07T17:45:12.725063Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:12.726104Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 
2026-01-07T17:45:12.726424Z node 33 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:12.727806Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:12.728242Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:12.728525Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:12.728563Z node 33 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:12.728818Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:12.738764Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:12.738894Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:12.738997Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:12.739123Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:12.739193Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:12.739239Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:314:2068] recipient: [33:22:2069] 2026-01-07T17:45:12.751042Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:12.751209Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:12.779802Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:12.780195Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:12.780286Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:12.780358Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:12.780491Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:12.780565Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:12.780625Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:12.780691Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:12.791556Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:12.791951Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:12.802690Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:12.803060Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:12.804540Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:12.804580Z node 33 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:12.804737Z node 33 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:12.804780Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:12.809342Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:45:12.810062Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 33:1000 Path# /dev/disk 2026-01-07T17:45:12.810098Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 34:1000 Path# /dev/disk 2026-01-07T17:45:12.810120Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 35:1000 Path# /dev/disk 2026-01-07T17:45:12.810137Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 36:1000 Path# /dev/disk 2026-01-07T17:45:12.810165Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 37:1000 Path# /dev/disk 2026-01-07T17:45:12.810184Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 38:1000 Path# /dev/disk 2026-01-07T17:45:12.810199Z node 33 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 39:1000 Path# /dev/disk 2026-01-07T17:45:12.810212Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 40:1000 Path# /dev/disk 2026-01-07T17:45:12.810235Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 41:1000 Path# /dev/disk 2026-01-07T17:45:12.810266Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 42:1000 Path# /dev/disk 2026-01-07T17:45:12.810297Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 43:1000 Path# /dev/disk 2026-01-07T17:45:12.810315Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 44:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-07T17:45:12.836661Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9209, MsgBus: 62669 2026-01-07T17:45:08.133527Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677153292400912:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:08.133624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:08.648680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:08.648816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:08.659386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:08.719153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:08.767710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:08.925154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:08.925169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:08.925173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:08.927679Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:08.995206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:09.144064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:09.446493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:09.508106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.643749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.796984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.870765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.591944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677166177304626:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.592103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.592761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677166177304636:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.592821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.908129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.942852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.993915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.026931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.063789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.115766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.181070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.238420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.370994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170472272806:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.371083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.371528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170472272811:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.371567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170472272812:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.371601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.376479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:12.395167Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677170472272815:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:12.471146Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677170472272866:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:13.137131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677153292400912:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:13.137188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:34.230650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:34.340659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:34.350310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:34.350645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:34.350781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:34.930939Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:35.041749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:35.041899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:35.081244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:35.178489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:35.938599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:35.939706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:35.939751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:35.939800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:35.940030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:36.002230Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:36.621593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:40.000701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:40.008591Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:40.013050Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:40.091351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:40.091496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.155431Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:40.157817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.436889Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:40.437000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:40.438518Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.439106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.439594Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.440489Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.440615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.440901Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.441022Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.441148Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.441505Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:40.464387Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:40.794289Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:40.847779Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:40.847920Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:40.900221Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:40.903143Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:40.903484Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:40.903559Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:40.903613Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:40.903678Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:40.903738Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:40.903799Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:40.904787Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:40.907367Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:40.910356Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:40.924756Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:40.924821Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:40.924944Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:40.935160Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:40.935299Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:41.005456Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:41.005945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:41.010981Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:41.074843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:41.099937Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:41.100093Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:41.128693Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:41.363980Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:41.426187Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:41.970719Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:42.096437Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:42.096537Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:42.862974Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ty: 1 } 2026-01-07T17:43:13.167890Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4227:3777]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:13.168236Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:43:13.168331Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4229:3779] 2026-01-07T17:43:13.168384Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4229:3779] 2026-01-07T17:43:13.168652Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4230:3780] 2026-01-07T17:43:13.168740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4230:3780], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:43:13.168795Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:43:13.168898Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4229:3779], server id = [2:4230:3780], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:13.168974Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:43:13.169031Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4227:3777], StatRequests.size() = 1 2026-01-07T17:43:13.865967Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:44:15.446274Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4092:3702], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:44:15.446555Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4092:3702], Start read next stream part 2026-01-07T17:44:15.446805Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrwdd6fm2mn3mavarb0gxm", SessionId: ydb://session/3?node_id=2&id=YzdkZDVmODItNDIzNGFjYjktZWQ1YTIxMTMtODkxZjY5ODg=, 
Slow query, duration: 62.891351s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:44:15.448498Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-07T17:44:15.448675Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:44:15.452467Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4092:3702], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:44:15.452543Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4092:3702], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTc5NzY3ZmYtZDk5MTkxNzEtZDJmNzJmNjEtOTQwZWMxZjE=, TxId: 2026-01-07T17:44:15.453166Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:44:15.453284Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], Start read next stream part 2026-01-07T17:44:15.562010Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4366:3902]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:44:15.562350Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:44:15.562405Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4366:3902], StatRequests.size() = 1 2026-01-07T17:45:07.196786Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecryatxejtd255n867n8n3w", SessionId: ydb://session/3?node_id=2&id=MjE5YjU0OTctYWUwYmUwZTAtMWE2ODUxY2YtZDkyY2FhZjg=, Slow query, duration: 51.739553s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:45:07.198451Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:45:07.198561Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], Start read next stream part 
2026-01-07T17:45:07.198881Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32180, txId: 18446744073709551615] shutting down 2026-01-07T17:45:07.199255Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4454:3985], ActorId: [2:4456:3986], Starting query actor #1 [2:4457:3987] 2026-01-07T17:45:07.199317Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4456:3986], ActorId: [2:4457:3987], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:45:07.201851Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:45:07.201897Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4082:2463], ActorId: [2:4339:3879], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTcxZWU5OTgtY2MzNzhmYjMtZGNlMTg4YzMtNzBiM2I0MA==, TxId: 2026-01-07T17:45:07.202503Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4456:3986], ActorId: [2:4457:3987], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWFiMzc1ZDUtNjQ1MjIwNDAtYTA0MjllZS04MjgxNDY5ZQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:45:07.274445Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4473:4001]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:45:07.274847Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:45:07.274899Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4473:4001], StatRequests.size() = 1 2026-01-07T17:45:07.505019Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4456:3986], ActorId: [2:4457:3987], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWFiMzc1ZDUtNjQ1MjIwNDAtYTA0MjllZS04MjgxNDY5ZQ==, TxId: 2026-01-07T17:45:07.505133Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4456:3986], ActorId: [2:4457:3987], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWFiMzc1ZDUtNjQ1MjIwNDAtYTA0MjllZS04MjgxNDY5ZQ==, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... 
waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:45:07.591088Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:45:07.591186Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:45:07.612922Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4229:3779], schemeshard count = 1 2026-01-07T17:45:07.900732Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:26: [72075186224037894] Delete long analyze operation, OperationId=operationId 2026-01-07T17:45:08.316891Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:45:08.317226Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-07T17:45:08.317365Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-07T17:45:08.329579Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:45:08.329681Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:45:08.330002Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:45:09.215322Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:45:09.215411Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:45:09.215479Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:45:09.215524Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:45:13.094249Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:44: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:3150:3428] 2026-01-07T17:45:13.094404Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64780, MsgBus: 14482 2026-01-07T17:45:07.699226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677147812314548:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:07.699304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.744439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:08.095735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:08.095862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:08.108840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:08.121417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:08.408328Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:08.441197Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677147812314521:2081] 1767807907697407 != 1767807907697410 2026-01-07T17:45:08.465299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:08.485601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:08.485618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:08.485690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:08.485754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:08.721097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:09.070710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:45:09.076817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:09.122296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.263671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.433840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.531059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.568404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677164992185582:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.568518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.569437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677164992185592:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.569609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.918085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.957316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.027965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.060575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.120864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.201471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.253882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.335585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.450678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169287153770:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.450792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.451063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169287153775:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.451180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169287153776:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.451439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.455278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:12.468960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677169287153779:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:12.530405Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677169287153831:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:12.700379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677147812314548:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:12.700453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpQueryPerf::Replace-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4848, MsgBus: 17040 2026-01-07T17:45:08.929152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677153265990544:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:08.929249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:09.185059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:09.188660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:09.188750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:09.244781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:09.285786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:09.387058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:09.387098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:09.387110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:09.387197Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:09.394185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:09.874211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:09.881662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:09.942945Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:09.944109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.095648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.253330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.319535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.199232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170445861563:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.199353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.200093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170445861573:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.200146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.612953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.649941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.684175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.719223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.758745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.818668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.859003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.922604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.997045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170445862447:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.997121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.997267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170445862453:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.997319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.997401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677170445862452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.001579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:13.011383Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677170445862456:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:13.073233Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677174740829803:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:13.929097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677153265990544:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:13.929170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14101, MsgBus: 24194 2026-01-07T17:45:09.026501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677155417435813:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:09.026724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:09.065054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:09.349777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:09.351839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:09.414853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:09.426353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677155417435706:2081] 1767807909022009 != 1767807909022012 2026-01-07T17:45:09.431508Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:09.433754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:09.533557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:09.533580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:09.533586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:45:09.533669Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:09.680445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:09.976746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:10.033014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:10.039799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.185047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.347391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.417379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.452154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677168302339465:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.452403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.452797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677168302339475:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.452860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.771335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.826409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.862748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.901952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.937943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.993024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.038386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.101875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.187953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172597307646:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.188087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.193080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172597307651:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.193162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172597307652:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.193256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.197914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:13.215276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677172597307655:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:13.311527Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677172597307706:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:14.025679Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677155417435813:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:14.025753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> IncrementalBackup::IncrementalBackupWithCoveringIndex [GOOD] >> IncrementalBackup::IncrementalBackupMultipleIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> Other::TraceInvalidTokenForbidden >> IncrementalBackup::CdcVersionSync [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> Other::TraceNoValidGroupForbidden ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5574, MsgBus: 10787 2026-01-07T17:45:10.495969Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677160661426252:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:10.502991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:10.729233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:10.732726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:10.732836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:10.739080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:10.840191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:10.849280Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677160661426131:2081] 1767807910470659 != 1767807910470662 2026-01-07T17:45:10.929530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:10.929554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:10.929563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:10.929657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:10.956444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:11.319564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:11.376085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.498140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:11.566449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.724064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.812450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.848702Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677173546329902:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.848801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.849138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677173546329912:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.849170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.155820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.198881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.237394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.274256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.313405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.392761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.436408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.478569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.568224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677177841298077:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.568310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.568542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677177841298082:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.568598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677177841298083:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.568601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:14.575198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:14.586990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677177841298086:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:14.653267Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677177841298137:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:15.492879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677160661426252:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:15.495782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:35.368526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:35.438515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:35.451466Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:35.451933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:35.451973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:35.640597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:35.640715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:35.694731Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807813319477 != 1767807813319481 2026-01-07T17:43:35.702463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:35.742657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:35.835577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:36.116986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.117062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.117095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.117242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-07T17:43:36.117284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:43:36.207529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:43:36.207721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.207882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:43:36.207924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:43:36.208048Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:43:36.208098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:36.208164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.208619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:43:36.208715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:43:36.208758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.208788Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.208920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.208967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.209018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.209055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:43:36.209084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:43:36.209108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:43:36.209164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.209497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.209524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.209599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.209630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.209670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.209707Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:43:36.209734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:43:36.209778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.210002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.210019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.210078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.210106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.210139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.210174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.210201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:43:36.210222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.210252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:43:36.212733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:43:36.213064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.213094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:43:36.213191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:43:36.213333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-07T17:43:36.213626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.213661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.213687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.213769Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-07T17:43:36.213789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:43:36.213827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... er action and tx pending commits 2026-01-07T17:45:08.386876Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.386904Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.409256Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:08.409340Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.409386Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:08.409430Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.409466Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.443654Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:08.443748Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.443791Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:08.443835Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.443876Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.466987Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:08.467075Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.467105Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:08.467140Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.467177Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.489315Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process 
user action and tx events 2026-01-07T17:45:08.489409Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.489449Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:08.489495Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.489534Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.512069Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [9:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:08.512177Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:08.512279Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [9:399:2398], Recipient [9:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:08.512324Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:08.549518Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:45:08.549674Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:08.549720Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.549757Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:08.549808Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:08.549844Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:45:08.577794Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1771:3366], Recipient [9:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-07T17:45:08.577925Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found incremental backup table at: /Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table 2026-01-07T17:45:08.580667Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1773:3368], Recipient [9:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-07T17:45:08.580778Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} ... 
waiting for SysViewsRoster update finished 2026-01-07T17:45:13.989929Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:14.002829Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:45:14.006233Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:319:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:45:14.006541Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:45:14.006759Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:45:14.371852Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:14.372046Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:14.407364Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1767807909339261 != 1767807909339265 2026-01-07T17:45:14.413186Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:14.476086Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:14.558784Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:45:14.872005Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:14.889706Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:15.018094Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.307105Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:15.307257Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:961:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:15.307381Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:15.308745Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:15.308978Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:15.314861Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:15.450808Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:45:15.485100Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> ActorPage::InvalidTokenForbidden >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2026-01-07T17:44:05.945011Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1767807845944976 2026-01-07T17:44:06.343464Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676888120155779:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:06.343512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:06.588189Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592676886962610489:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:06.588245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:06.605381Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:06.631145Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:06.631604Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:07.005021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:07.028822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:07.277013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:07.277265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:07.317196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:07.317286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:07.326573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:07.326678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:07.359997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:07.449451Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:07.490144Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:44:07.543601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:07.589207Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:07.607429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:07.728214Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:07.743015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:07.763220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00387e/r3tmp/yandexjk9P8M.tmp 2026-01-07T17:44:07.763252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00387e/r3tmp/yandexjk9P8M.tmp 2026-01-07T17:44:07.763444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00387e/r3tmp/yandexjk9P8M.tmp 2026-01-07T17:44:07.763559Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:07.823578Z INFO: TTestServer started on Port 12141 GrpcPort 25953 PQClient connected to localhost:25953 2026-01-07T17:44:08.048114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:10.928116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676905300026279:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.928380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.929066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676905300026297:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.929145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.929146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676905300026299:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.929523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676905300026303:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.929584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:10.932861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:10.953824Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676905300026302:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-07T17:44:11.238497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:11.253107Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676909594993685:2981] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:11.270827Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592676904142479828:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:11.273343Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTNmYWI1Y2YtN2NhOWQxNTgtMzY3MmEzMDgtNDRkZTljNTQ=, ActorId: [2:7592676904142479801:2302], ActorState: ExecuteState, LegacyTraceId: 01kecry6e5dk90rt6d9s6xsrt4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:11.275823Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:44:11.315637Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676909594993743:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:11.316337Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTVhNzVlNzktNTlkYjlmYS1lMzBlYWMyMi1jNGQ4ZDhhYw==, ActorId: [1:7592676 ... 760574Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.760590Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.766861Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-07T17:45:16.776232Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2026-01-07T17:45:16.778216Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:45:16.785042Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.785084Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.785102Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.785124Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.785139Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.787801Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1767807916] updating configuration. Deleted partitions []. 
Added partitions [] 2026-01-07T17:45:16.788308Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:45:16.790603Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:45:16.797302Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1767807916840, TxId 281474976715678 2026-01-07T17:45:16.797345Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.797365Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-07T17:45:16.797408Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:45:16.797422Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.797438Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:45:16.797448Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.797705Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767807916840, TxId 281474976715678 2026-01-07T17:45:16.797736Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.797752Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:45:16.797767Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.797796Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:45:16.797959Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976715678 2026-01-07T17:45:16.797977Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:45:16.797990Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:45:16.798007Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.798193Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:45:16.800987Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:45:16.801246Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:45:16.801353Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:45:16.801389Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.801407Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.801419Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.801435Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.801460Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.801492Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:45:16.801863Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1767807916" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1767807916" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1767807916" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-07T17:45:16.801911Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:45:16.802055Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:45:16.804433Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:45:16.815621Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-07T17:45:16.860731Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.860769Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.860785Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.860805Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.860820Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.888939Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.888974Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.888989Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.889007Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.889019Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.968566Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.968602Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.968618Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.968637Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.968653Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:45:16.990566Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:16.990598Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.990614Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:16.990631Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:16.990642Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:45:17.215816Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [9:7592677189430188431:2487] TxId: 281474976715679. Ctx: { TraceId: 01kecs06r0eddg5dsk3wqnf5ad, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGVmODJhMWItZmNkYjM4ZGUtMzQ5YWM0ZTEtOWYxZmUzMjE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-01-07T17:45:17.216029Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [9:7592677189430188438:2487], TxId: 281474976715679, task: 3. Ctx: { CheckpointId : . TraceId : 01kecs06r0eddg5dsk3wqnf5ad. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=OGVmODJhMWItZmNkYjM4ZGUtMzQ5YWM0ZTEtOWYxZmUzMjE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [9:7592677189430188431:2487], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |91.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback |91.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4662, MsgBus: 14234 2026-01-07T17:45:07.015164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677147175711619:2095];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:07.015719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.319734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.319851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.338584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.355774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.398872Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.466263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:07.466286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.466293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.466371Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.508438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.947191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:08.037307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:08.046412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.299828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.603544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.688947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.578158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677160060615311:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.578293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.585068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677160060615321:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.585161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.903673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.934332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.966248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.010406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.041859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.074416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.101162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.140584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.209058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677164355583484:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.209121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.209204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677164355583489:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.209248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677164355583491:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.209291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.213143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:11.223100Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677164355583493:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:11.284707Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677164355583544:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:12.014961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677147175711619:2095];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:12.015039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15105, MsgBus: 10530 2026-01-07T17:45:14.328083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677179963528249:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:14.328182Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:14.348266Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:14.419918Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:14.421233Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677179963528139:2081] 1767807914323629 != 1767807914323632 2026-01-07T17:45:14.451680Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:14.451769Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:14.463362Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:14.516766Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:14.516791Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:14.516801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:14.516877Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:14.650982Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:14.950021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:45:14.965137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:14.984141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.035495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.212828Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.304057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.428596Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:17.745750Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677192848431905:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.745868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.746224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677192848431914:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.746295Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.825031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.860741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.890298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.933637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.978003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.048356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.125778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.222503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.391605Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677197143400087:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.391683Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.391995Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677197143400092:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.392045Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677197143400093:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.392274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.396279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:18.412134Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677197143400096:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:18.505912Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677197143400149:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:19.330848Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677179963528249:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.330937Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> BsControllerConfig::DeleteStoragePool [GOOD] >> BsControllerConfig::CommandRollbackWhenCombined ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1122, MsgBus: 16714 2026-01-07T17:45:14.038186Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677177925483933:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:14.038889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:14.288850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:14.322258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:14.322363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:14.365196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:14.380264Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:14.469957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:14.509103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:14.509130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:14.509136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:14.509216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:14.988961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:15.024220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:15.041373Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:15.120541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.326768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.596181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.731996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.206872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677195105354828:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.206988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.209322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677195105354837:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.209404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.582949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.649838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.715114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.755375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.811824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.845957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.902667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.993201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.054251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677177925483933:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.054625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:19.130293Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677199400323001:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:19.130377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:19.130673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677199400323006:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:19.130734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677199400323007:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:19.130854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:19.136187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:19.157263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:45:19.158081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677199400323010:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:19.241555Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677199400323063:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8346, MsgBus: 28683 2026-01-07T17:45:06.870935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677142466019655:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:06.871056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.123692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.123958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.124215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.158208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.233737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.302679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:07.302704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.302709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.302786Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.386633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.713131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:07.724045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:07.809568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:07.880950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:07.968076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.134521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.208281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.110886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159645890592:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.111015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.111329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159645890602:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.111367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.450169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.498171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.534526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.569623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.610019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.659166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.706432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.760588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.838595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159645891472:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.838713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.838934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159645891477:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.838960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677159645891478:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.838978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.842150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:10.853303Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677159645891481:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:10.932237Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677159645891534:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:11.870575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677142466019655:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:11.870646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30659, MsgBus: 6857 2026-01-07T17:45:14.061297Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677178521125830:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:14.061349Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:14.098512Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:14.183524Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:14.193964Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677178521125806:2081] 1767807914060246 != 1767807914060249 2026-01-07T17:45:14.221411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:14.221495Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:14.225035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:14.355489Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:14.355523Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:14.355532Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:14.355650Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:14.369755Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:14.728376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:45:14.756310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.834035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.019588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.081786Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:15.120573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.770251Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677191406029573:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.770385Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.774017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677191406029583:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.774140Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.866732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.907237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.984466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.082709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.160920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.255992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.354663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.431471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.613789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677195700997755:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.613892Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.614343Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677195700997760:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.614402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677195700997761:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.614683Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:18.619041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:18.651870Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677195700997764:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:18.732992Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677195700997817:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:19.061155Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677178521125830:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.061215Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler >> ReadSessionImplTest::CommonHandler [GOOD] >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2930:2117] 2026-01-07T17:45:05.820375Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:05.821790Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:05.822215Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:05.824269Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:05.825160Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# 
NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:05.825506Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.825539Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.825861Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:05.835220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:05.835354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:05.835528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:05.835673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.835782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.835856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-07T17:45:05.847830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:05.848014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.894605Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.894779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.894858Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.894956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.895091Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.895166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.895200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.895255Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.906072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.906259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.917197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.917365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:05.918761Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:05.918808Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:05.919039Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:05.919089Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:05.938551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } } 2026-01-07T17:45:05.939983Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-07T17:45:05.940069Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-07T17:45:05.940093Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-07T17:45:05.940116Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-07T17:45:05.940138Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-07T17:45:05.940164Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-07T17:45:05.940186Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-07T17:45:05.940225Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-07T17:45:05.940248Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-07T17:45:05.940268Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-07T17:45:05.940297Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-07T17:45:05.940324Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-07T17:45:05.940350Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-07T17:45:05.940370Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-07T17:45:05.940392Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-07T17:45:05.940431Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-07T17:45:05.940454Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-07T17:45:05.940479Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-07T17:45:05.940500Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 
2026-01-07T17:45:05.940520Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-07T17:45:05.940545Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-07T17:45:05.940597Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk ... UG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:24.250682Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:24.250988Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:24.251046Z node 101 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:24.251250Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:24.270380Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:24.270545Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:24.270667Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:24.270786Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:24.270878Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:24.270971Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [101:28:2070] sender: [101:49:2057] recipient: [101:11:2058] 2026-01-07T17:45:24.285621Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:24.285796Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:24.309512Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:24.309687Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:24.309834Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:24.309921Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:24.310049Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:24.310116Z node 101 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:24.310166Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:24.310218Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:24.321553Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:24.321723Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:24.332951Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:24.333147Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:24.334678Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:24.334738Z node 101 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:24.334975Z node 101 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:24.335036Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:24.335798Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-07T17:45:24.336791Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:23:2068] Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:29:2057] recipient: [102:23:2068] 2026-01-07T17:45:24.618401Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:24.619438Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:24.619774Z node 102 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:24.621695Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:24.621938Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:24.622107Z node 102 
:BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:24.622139Z node 102 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:24.622365Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:24.633321Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:24.633494Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:24.633620Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:24.633765Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:24.633862Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:24.633967Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:49:2057] recipient: [102:11:2058] 2026-01-07T17:45:24.645550Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:24.645698Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:24.677547Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:24.677688Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:24.677781Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:24.677863Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:24.677974Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:24.678019Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:24.678066Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:24.678144Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:24.689575Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 
2026-01-07T17:45:24.689722Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:24.700652Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:24.700812Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:24.702411Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:24.702480Z node 102 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:24.702743Z node 102 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:24.702816Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:24.703577Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-07T17:45:24.704752Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> Other::TraceInvalidTokenForbidden [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-07T17:44:42.870225Z :SettingsValidation INFO: Random seed for debugging is 1767807882870194 2026-01-07T17:44:43.119370Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677045931057331:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:43.119466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:43.157459Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:43.161890Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677046677625475:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:43.162087Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:43.179163Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:43.351347Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:43.387033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:43.409022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:43.409135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:43.412242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:43.412315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:43.422310Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:44:43.422488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:43.424865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:43.539219Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:43.587318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:43.595551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003875/r3tmp/yandexUYXkPZ.tmp 2026-01-07T17:44:43.595578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003875/r3tmp/yandexUYXkPZ.tmp 2026-01-07T17:44:43.595760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/ec9d/003875/r3tmp/yandexUYXkPZ.tmp 2026-01-07T17:44:43.595857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:43.630878Z INFO: TTestServer started on Port 24717 GrpcPort 8341 2026-01-07T17:44:43.635782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:8341 2026-01-07T17:44:43.797143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:44.135086Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:44.171424Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:46.259261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677058815960530:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.259376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677058815960522:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.259513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.260901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677058815960537:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.261084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.262549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:46.279850Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677058815960536:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:44:46.595589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:46.596962Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677058815960628:2977] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:46.611959Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677059562527623:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:46.612317Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NWNmOWRhNzUtYTVlMTNhNmQtYTNmYzBmODAtNjQ2MDgxYjg=, ActorId: [2:7592677059562527597:2301], ActorState: ExecuteState, LegacyTraceId: 01kecrz92n9ftmar0ygexca3rt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:46.613928Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677058815960678:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:46.614199Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWZkNjU0MGEtMzE5YjI1NzQtY2EyMDc5YzQtMjRmNTVkODg=, ActorId: [1:7592677058815960520:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrz8xh22aqtv84v28sfmqb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:46.620547Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:44:46.620547Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:44:46.730213Z n ... 28_v1 is DEAD 2026-01-07T17:45:21.978985Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [5:7592677202358649860:2493] disconnected. 
2026-01-07T17:45:21.979007Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [5:7592677202358649860:2493] disconnected; active server actors: 1 2026-01-07T17:45:21.979023Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [5:7592677202358649860:2493] client user disconnected session shared/user_5_1_16303090275643336828_v1 2026-01-07T17:45:21.981203Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_5_1_16303090275643336828_v1 2026-01-07T17:45:21.981241Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [5:7592677202358649863:2496] destroyed 2026-01-07T17:45:21.981284Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_16303090275643336828_v1 2026-01-07T17:45:22.025361Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:22.025397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:22.025413Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:22.025435Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:22.025449Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:45:22.133120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:22.133156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:22.133170Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:22.133189Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:22.133202Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:45:22.302764Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [5:7592677210948584624:2520] TxId: 281474976715683. Ctx: { TraceId: 01kecs0bjwfv9mq2mjp3nyt4xh, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzNmNjhiMTctNjEyNzkyZDYtNmNkY2U2YzAtYjRlMTkzYmQ=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 trace_id# 2026-01-07T17:45:22.302925Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592677210948584628:2520], TxId: 281474976715683, task: 3. Ctx: { TraceId : 01kecs0bjwfv9mq2mjp3nyt4xh. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MzNmNjhiMTctNjEyNzkyZDYtNmNkY2U2YzAtYjRlMTkzYmQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592677210948584624:2520], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2026-01-07T17:45:23.642584Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.642617Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.642681Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:45:23.643011Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:45:23.643546Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:45:23.643828Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.644169Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-07T17:45:23.646389Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.646424Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.646454Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:45:23.646733Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:45:23.647277Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:45:23.647406Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.647658Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:45:23.648784Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:45:23.660604Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-07T17:45:23.660796Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-07T17:45:23.661037Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:45:23.661106Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-07T17:45:23.661141Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-07T17:45:23.661201Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-07T17:45:23.665044Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.665074Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.665110Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:45:23.665391Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:45:23.666037Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:45:23.666240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.666634Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:45:23.667456Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.667700Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:45:23.667872Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:45:23.667948Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:45:23.668065Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2026-01-07T17:45:23.669831Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.669861Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.669905Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:45:23.670324Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:45:23.670778Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:45:23.670962Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:23.671362Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:45:23.672075Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:45:23.672583Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:45:23.672785Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-07T17:45:23.672891Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:45:23.672976Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:45:23.673039Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-07T17:45:23.673191Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2026-01-07T17:45:23.673233Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:45:25.794197Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:25.794246Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:25.794288Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:45:25.803968Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:45:25.807179Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:45:25.808333Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:25.809666Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:45:25.809897Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:45:25.809987Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:45:25.810096Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |91.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19249, MsgBus: 20769 2026-01-07T17:45:17.972059Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677189741880754:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:17.972358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:18.592169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:18.702474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:18.712980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:18.752068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:18.777585Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677189741880542:2081] 1767807917947578 != 1767807917947581 2026-01-07T17:45:18.787055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:18.840251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:18.938837Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:18.989630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:18.989651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:18.989657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:18.989721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:19.474664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:19.480813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:19.527260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.675163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.868538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.963753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.259829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677211216718900:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.259956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.265076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677211216718910:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.265167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.649577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.702670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.739790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.780143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.814421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.872846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.904452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.942934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.973843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677189741880754:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:22.973909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:23.021179Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215511687079:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.021272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.021468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215511687084:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.021526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215511687085:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.021579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.025312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:23.035426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677215511687088:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:23.124632Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677215511687139:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26194, MsgBus: 6641 2026-01-07T17:45:17.586742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677190221680718:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:17.587263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:17.587691Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005092s 2026-01-07T17:45:17.890488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:17.890612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:17.957628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:17.975445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:18.010133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:18.029048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677190221680647:2081] 1767807917485775 != 1767807917485778 2026-01-07T17:45:18.337228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:18.449459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:18.449482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:18.449498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:18.449575Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:18.604419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
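
Earlier in this report, the persqueue read-session test logged "Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)", i.e. a message payload that was not valid zlib data, and the message was still delivered with DataDecompressionError set. The same inflate error is easy to reproduce with Python's standard zlib module; the payload below is made up for illustration.

    import zlib

    # Round trip succeeds when the payload really is zlib-compressed.
    payload = zlib.compress(b"message body")
    assert zlib.decompress(payload) == b"message body"

    # Feeding plain, uncompressed bytes to inflate fails with the same
    # "incorrect header check" error the read session logged above.
    try:
        zlib.decompress(b"plain text, not zlib-framed")
    except zlib.error as e:
        print(e)  # e.g. "Error -3 while decompressing data: incorrect header check"
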
2026-01-07T17:45:19.346922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:19.356338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:19.441651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.716143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.953479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.111888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.434193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677211696519013:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.434357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.437649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677211696519023:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.437766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.583334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677190221680718:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:22.583439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:22.898808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.933467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.969067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.002304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.027482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.059965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.105324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.166725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.262262Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215991487191:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.262379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.262623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215991487197:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.262927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215991487196:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.262972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.267975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:23.285379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677215991487200:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:23.349307Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677215991487251:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> Other::TraceNoValidGroupForbidden [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceInvalidTokenForbidden [GOOD] Test command err: 2026-01-07T17:45:19.075672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677200958236135:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.076635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:19.153608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:19.539248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:19.539385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:19.587367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:19.922802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:19.941020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677200958236000:2081] 1767807919049538 != 1767807919049541 2026-01-07T17:45:19.981300Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:20.105162Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:20.275119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:20.281632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:20.281656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:20.281667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:20.281742Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:20.772283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:20.891086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:20.941026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:45:20.952671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> ActorPage::InvalidTokenForbidden [GOOD] >> ActorPage::NoUseAuthOk |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] |91.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> JsonChangeRecord::Heartbeat |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams >> JsonChangeRecord::Heartbeat [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] |91.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> JsonChangeRecord::DataChangeVersion [GOOD] |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceNoValidGroupForbidden [GOOD] Test command err: 2026-01-07T17:45:19.858764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677199602787210:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.858905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:20.460504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:20.520561Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677199602787057:2081] 1767807919811233 != 1767807919811236 2026-01-07T17:45:20.532630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:20.543832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:20.543934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:20.575160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
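
The ydb/core/mon/ut cases above (Other::TraceInvalidTokenForbidden, Other::TraceNoValidGroupForbidden) verify that the embedded monitoring pages reject requests whose token is invalid or whose subject has no permitted group. A rough sketch of probing such behaviour from the outside is below; the port, path and expected status code are assumptions for illustration and are not taken from this log.

    import urllib.error
    import urllib.request

    # Hypothetical probe: monitoring endpoint and path below are assumptions.
    req = urllib.request.Request(
        "http://localhost:8765/trace",
        headers={"Authorization": "Bearer not-a-valid-token"},
    )
    try:
        with urllib.request.urlopen(req, timeout=5) as resp:
            print("unexpectedly allowed:", resp.status)
    except urllib.error.HTTPError as e:
        print("rejected with HTTP", e.code)  # the tests expect a Forbidden-style reply
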
2026-01-07T17:45:20.784086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:20.889231Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:20.949347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:20.949374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:20.949381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:20.949471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:21.381333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:21.391183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:21.458431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:21.484921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7397, MsgBus: 11425 2026-01-07T17:45:12.219797Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677168217252900:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:12.219861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:12.513065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:12.515100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:12.515203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:12.520986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:12.591530Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:12.612221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677168217252778:2081] 1767807912181220 != 1767807912181223 2026-01-07T17:45:12.751381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:12.825550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:12.825576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:12.825586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:12.825678Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:13.227337Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:13.438698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:13.507265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.732600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.923749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:14.004820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:16.419244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677185397123842:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:16.419362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:16.421300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677185397123852:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:16.421391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:16.882029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:16.929702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:16.980350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.057926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.109237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.199678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.229711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677168217252900:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:17.229788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:17.281689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.362680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:17.509344Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677189692092035:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.509443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.509733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677189692092041:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.509971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677189692092040:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.510019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:17.513828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:17.530208Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677189692092044:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:17.617944Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677189692092096:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10514, MsgBus: 21077 2026-01-07T17:45:21.430468Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677209810168424:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:21.459158Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:21.569012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:21.571134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:21.571228Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:21.572004Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:21.612909Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:21.760470Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:21.771835Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:21.771860Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:21.771868Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:21.771943Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:22.426227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:22.442756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.501618Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:22.544711Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.787681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.870151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:25.758116Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677226990039436:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:25.758197Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:25.758642Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677226990039446:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:25.758690Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:25.895086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:25.935848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:25.989194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.072694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.160238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.232501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.347977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.418260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.473697Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677209810168424:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:26.476672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:26.561533Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677231285007619:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:26.561667Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:26.562259Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677231285007624:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:26.562320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677231285007625:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:26.562484Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:26.567592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:26.582419Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677231285007628:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:26.660571Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677231285007680:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.8%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:27.131733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:27.203206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:27.214915Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:27.215243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:27.215282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:27.432754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:27.432846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:27.480346Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807804996378 != 1767807804996382 2026-01-07T17:43:27.490368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:27.530898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:27.620330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:28.056558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:28.057465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:28.057497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:28.057536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:28.057677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:28.070212Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-07T17:43:39.780781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:39.780974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:39.781085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:39.782617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:39.782747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:39.788049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:39.813844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2826], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-07T17:43:39.877443Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2860] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:40.103153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:41.006533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:41.542636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:42.547568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:43.344790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:43.884162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:45.085705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:45.433670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-07T17:43:50.397944Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: 
"01kecrx7zvdv3qt5vkt14b14x0", SessionId: ydb://session/3?node_id=1&id=NmMzNDAwNDItMmJlZTY4YzUtMTNjYzUwODAtYTM0ZThiMzI=, Slow query, duration: 10.620161s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n ", parameters: 0b REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-07T17:43:51.260252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:51.260326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-07T17:44:01.933409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-07T17:44:03.209150Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager. ... 
ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:16.302414Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:16.302788Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:16.302819Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-07T17:45:16.302843Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-01-07T17:45:16.302868Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-01-07T17:45:16.302902Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-01-07T17:45:16.302928Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:16.302961Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:16.303742Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3142:4504];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:16.303865Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3150:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:16.303936Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3157:4512];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-07T17:45:27.515498Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.515605Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.515645Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.515847Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.515952Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.516012Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-07T17:45:27.516074Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516122Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516193Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.516248Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.516277Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-07T17:45:27.516305Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516342Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516398Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.516438Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.516466Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-07T17:45:27.516501Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516535Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.516573Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.516759Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.517129Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:27.517226Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.517258Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:27.517294Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.517325Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.517368Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.517559Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.517588Z 
node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:27.517618Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.517655Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.517809Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:27.517842Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:27.517877Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.517905Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:27.517943Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:27.518173Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3142:4504];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:27.518278Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3150:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:27.518405Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3157:4512];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64543, MsgBus: 3030 2026-01-07T17:45:07.046380Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677150321064304:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:07.046484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.284966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.363179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.363332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.371708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.373095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677150321064278:2081] 1767807907045657 != 1767807907045660 2026-01-07T17:45:07.397063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.469324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:07.469349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.469364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.469455Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.471463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.875438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:07.893195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:07.943649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.063850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:08.122213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.299587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.385185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.274393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677163205968033:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.274506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.274988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677163205968043:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.275046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.662948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.699092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.735619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.775398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.814185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.849234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.880922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.931189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.023724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167500936206:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.023842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.024143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167500936211:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.024178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167500936212:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.024298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.027796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:11.038097Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677167500936215:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:11.124046Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677167500936268:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:12.049110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677150321064304:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:12.049174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:12.895884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 2026-01-07T17:45:16.991192Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:17.027409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:17.027497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:17.066187Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:17.293046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:17.345636Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:17.345663Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:17.345674Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:17.345773Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:17.901093Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:18.051456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:18.061623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:18.080556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.214328Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.559152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.646267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.505097Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677207405105381:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.505206Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.509588Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677207405105391:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.509704Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.605089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.688675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.732934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.771500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.834464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.877057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677185930267102:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:21.877191Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:21.902662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.961515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.039916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.199083Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677211700073558:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.199169Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.199351Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677211700073563:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.199373Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677211700073564:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.199546Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.203474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:22.221622Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677211700073567:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:22.322953Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677211700073618:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:24.085413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.180960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.257598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> JsonChangeRecord::DataChange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7056, MsgBus: 20446 2026-01-07T17:45:06.900362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677145581466422:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:06.900432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:07.124070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:07.165100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:07.165228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:07.203021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:07.231844Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:07.233815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677145581466394:2081] 1767807906899104 != 1767807906899107 2026-01-07T17:45:07.297265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T17:45:07.297294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:07.297307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:07.297430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:07.318938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:07.674002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:07.771994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:07.909761Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:07.919664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.109830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:08.195798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.255568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677162761337464:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.255756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.256181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677162761337474:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.256232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:10.607700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.690647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.731972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.767671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.805359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.869140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.906453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.955169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.028141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167056305650:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.028211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.028617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167056305655:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.028627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167056305656:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.028674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.032902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:11.045672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677167056305659:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:11.120711Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677167056305710:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:11.900745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677145581466422:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:11.900819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:13.295361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... e 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:17.454120Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:17.454201Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:17.454313Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:17.455424Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677190824947293:2081] 1767807917326614 != 1767807917326617 2026-01-07T17:45:17.487322Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:17.602281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:17.685464Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:17.685491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:17.685499Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:17.685574Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:18.242163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:18.251622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-07T17:45:18.267080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.354486Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:18.367565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.604006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:18.732705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.598134Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677208004818351:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.598235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.598535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677208004818361:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.598575Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:21.662964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.699764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.741965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.805324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.852950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.954960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.030248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.111373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.209818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677212299786533:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.209912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.210264Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677212299786538:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.210325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677212299786539:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.210428Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.213944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:22.225184Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677212299786542:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:22.327291Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677212299786593:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:24.311902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.407040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.492084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25336, MsgBus: 15828 2026-01-07T17:45:08.444940Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677152626381982:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:08.445664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:08.475293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:08.719635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:08.719792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:08.755113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:08.796688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:08.797080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:08.821100Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677152626381769:2081] 1767807908418826 != 1767807908418829 2026-01-07T17:45:08.937686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:08.937705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:08.937712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:08.937799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:09.078645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:09.360053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:09.435302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.445259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:09.638542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.801758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.880644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.833507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677165511285527:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.833619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.833982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677165511285537:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.834040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.212582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.253255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.285319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.314374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.358324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.405979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.448433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.496437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.585740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169806253712:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.585843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.585931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169806253717:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.586243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169806253719:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.586314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.590025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:12.603259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677169806253720:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:12.674987Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677169806253772:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:13.445045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677152626381982:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:13.446781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:14.607659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... 2026-01-07T17:45:19.006161Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:19.041479Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:19.041564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:19.045625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:19.253050Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:19.281617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:19.281642Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:19.281654Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:19.281739Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:19.909541Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:20.012783Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:20.029618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:20.035363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.135141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.348042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.466057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.848454Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677214866118787:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.848544Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.848989Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677214866118797:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.849035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.969823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.010944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.048274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.101316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.144234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.193567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.255574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.311083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.395175Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677219161086967:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.395278Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.395366Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677219161086972:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.395414Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677219161086974:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.395487Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.400147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:23.411921Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677219161086976:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:23.480896Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677219161087027:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:23.901760Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677197686247772:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:23.901845Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:25.844798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:25.953323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.068465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11039, MsgBus: 17556 2026-01-07T17:45:08.303736Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677152541379246:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:08.305206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:08.585171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:08.585369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:08.604238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:08.623001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:08.664484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677152541379061:2081] 1767807908278860 != 1767807908278863 
2026-01-07T17:45:08.673279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:08.749322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:08.749359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:08.749372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:08.749468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:08.899156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:09.140797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:09.147766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:09.195736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.313621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:09.369750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.548474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:09.636115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.478384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677165426282820:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.478522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.481443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677165426282830:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.481561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:11.840507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.872339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.904695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.937015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:11.970109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.005485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.070028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.138979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.233476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169721250996:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.233545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.233702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169721251002:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.233715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677169721251001:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.233732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.236745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:12.246877Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677169721251005:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:12.363946Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677169721251058:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:13.305665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677152541379246:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:13.305962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:14.222346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... .cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:18.173166Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677189319193873:2081] 1767807917993346 != 1767807917993349 2026-01-07T17:45:18.188464Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:18.201848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:18.341044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:18.421813Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:18.421839Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:18.421847Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:18.421940Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:19.002520Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:19.335510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:19.349553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:19.370863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.487106Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.742892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:19.846776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.815641Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677210794032212:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.815747Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.816094Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677210794032221:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.816144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:22.897194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.949718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.987274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:22.997568Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677189319193900:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:22.997636Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:23.039429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.081957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.146075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.202911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.268728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.355826Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677215089000392:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.355923Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.356136Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677215089000397:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.356138Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677215089000398:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.356179Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.359421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:23.370442Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677215089000401:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:23.469873Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677215089000452:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:25.971758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.059269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:26.167250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> 
TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11094, MsgBus: 30032 2026-01-07T17:45:09.096343Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677154921921781:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:09.096486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:09.410355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:09.482845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:09.482953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:09.484576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:09.547148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:09.657101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:09.657126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:09.657131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:09.657208Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:09.705015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:10.093237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:10.095124Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:10.139435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.272251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.441379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:10.508271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.478994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167806825356:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.479112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.479429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677167806825366:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.479502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:12.854812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.902974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.936955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:12.975259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.013411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.047625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.101433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.149886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:13.234358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172101793534:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.234455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.235070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172101793540:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.235358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677172101793539:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.235390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:13.242231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:13.259098Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677172101793543:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:13.351360Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677172101793594:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:14.094299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677154921921781:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:14.094391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:15.287704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:15.362538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOp ... 2026-01-07T17:45:19.864877Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:19.894712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:19.894795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:19.913540Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:20.045586Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:20.045611Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:20.045621Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:20.045708Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:20.078973Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:20.595875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:20.609597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:20.632597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.705848Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:20.752339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:20.977220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.061675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.762387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677215804281766:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.762494Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.762775Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677215804281775:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.762829Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.834536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.927348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.976997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.018468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.065841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.128918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.176699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.271677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.397224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677220099249940:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.397304Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.397491Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677220099249945:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.397521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.397531Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677220099249946:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.403545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:24.458617Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677220099249949:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:24.564204Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677220099250000:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:24.706256Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677198624410756:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:24.706367Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:27.327933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:27.411746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:27.460452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] >> ColumnShardTiers::DSConfigsStub [GOOD] |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |92.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogram 2026-01-07 17:45:18,301 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 17:45:18,703 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 162197 58.6M 58.3M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/ec9d/0017af/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args 162497 1.7G 1.6G 1.6G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/ec9d/0017af/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:35:22.638382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:22.715328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:22.722153Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:35:22.722492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:35:22.722613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:35:23.085388Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:23.176222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:23.176326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:23.211353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:23.303590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:35:24.102000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:35:24.103052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:35:24.103102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:35:24.103132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:35:24.103580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:35:24.158486Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:35:24.786396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:35:27.966636Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:35:27.980077Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:35:27.984749Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:35:28.037593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:35:28.037730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:28.084948Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:35:28.088436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:28.348211Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:35:28.348330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:35:28.350061Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.351033Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.351608Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.352562Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.353164Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.353340Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.353576Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.353755Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.353967Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:35:28.376224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:35:28.605366Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:35:28.707575Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:35:28.707688Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:35:28.746286Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:35:28.747808Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:35:28.748048Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:35:28.748123Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:35:28.748190Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:35:28.748245Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:35:28.748303Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:35:28.748361Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:35:28.749252Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:35:28.767441Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:28.767593Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:35:28.805027Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:35:28.805914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:35:28.886847Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:35:28.888655Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:35:28.905939Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:35:28.906009Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:35:28.906099Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:35:28.914237Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:35:28.918520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:35:28.927369Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:35:28.927537Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:35:28.943972Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:35:29.165360Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:35:29.185325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:35:29.565173Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:35:29.692308Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:35:29.692414Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:35:30.593516Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 94046644480 2026-01-07T17:36:03.244766Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:36:03.244868Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:36:03.245275Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:36:03.258674Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:36:03.401523Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:36:03.404859Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:36:03.407924Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1` 2026-01-07T17:36:03.408060Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], Start read next stream part 2026-01-07T17:36:03.429254Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4492:4031], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.429384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4503:4036], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.429548Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.431209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4506:4039], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.431522Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:36:03.462766Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4524:4045], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:36:03.466054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:36:03.528896Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4507:4040], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:36:03.700830Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4586:4087], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:36:03.735648Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4585:4086] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:36:04.215325Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4607:4100]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:36:04.215567Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:36:04.215634Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4609:4102] 2026-01-07T17:36:04.215688Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4609:4102] 2026-01-07T17:36:04.215933Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4610:4103] 2026-01-07T17:36:04.216072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4609:4102], server id = [2:4610:4103], tablet id = 72075186224037894, status = OK 2026-01-07T17:36:04.216122Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4610:4103], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:36:04.216175Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:36:04.216268Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:36:04.216342Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4607:4100], StatRequests.size() = 1 2026-01-07T17:36:04.216407Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:40:22.695013Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrfaag10c8a4j7fweymhtn", SessionId: ydb://session/3?node_id=2&id=M2EyODQ1MDUtNmI5ZjljZWUtNzBiM2U0ZjktZDQ3NTFjMGU=, Slow query, duration: 259.270224s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T17:40:22.705591Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:40:22.713713Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], Start read next stream part 2026-01-07T17:40:22.722737Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-07T17:40:22.723418Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4747:4227], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:40:22.728457Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:40:22.728584Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4479:4025], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTA4MmRjM2EtODViNTBiODYtNmQ4YjA1NjItZDExMTg0MDU=, TxId: 2026-01-07T17:40:22.737360Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4747:4227], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-07T17:40:22.737549Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4469:2463], ActorId: [2:4747:4227], Start read next stream part 2026-01-07T17:40:23.128784Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4773:4249]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:40:23.129213Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:40:23.129264Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4773:4249], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/0017af/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/0017af/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:44.758714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:44.875666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:44.892869Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:44.893433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:44.893494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:45.183259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:45.183389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:45.264216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807821671687 != 1767807821671691 2026-01-07T17:43:45.280809Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:45.338312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:45.447679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:46.200061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:46.202568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:46.202628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:46.202662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:46.202914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:46.217278Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:46.384860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-07T17:43:46.553494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:43:46.553812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:43:46.554104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:43:46.554282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:43:46.554417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:43:46.554516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:43:46.554613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:43:46.554742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:43:46.554844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:43:46.554977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:43:46.555100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:43:46.555194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:43:46.555318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:43:46.612795Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-01-07T17:43:46.620417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:43:46.620557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:43:46.620687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:43:46.620729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:43:46.621451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:43:46.621512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:43:46.621645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:43:46.621696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:43:46.621769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:43:46.621826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:43:46.621878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:43:46.621922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:43:46.622147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:43:46.622203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:43:46.622358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:43:46.622410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:43:46.622481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:43:46.622524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:43:46.622583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:43:46.622619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:43:46.622775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=720 ... 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:19.752675Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:19.752716Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2026-01-07T17:45:19.752749Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:19.752778Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2026-01-07T17:45:19.752810Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2026-01-07T17:45:19.752850Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037888 2026-01-07T17:45:19.752897Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:19.752950Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:19.752979Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2026-01-07T17:45:19.753008Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:19.753033Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2026-01-07T17:45:19.753060Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2026-01-07T17:45:19.753094Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037889 2026-01-07T17:45:19.753131Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:19.753267Z node 1 
:TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:19.753297Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2026-01-07T17:45:19.753328Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:19.753353Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2026-01-07T17:45:19.753380Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2026-01-07T17:45:19.753414Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037890 2026-01-07T17:45:19.753452Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:19.754347Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:19.754451Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:955:2809];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:19.754526Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:959:2813];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2026-01-07T17:45:31.319773Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-07T17:45:31.319867Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-07T17:45:31.319893Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-07T17:45:31.319941Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-07T17:45:31.320081Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:31.320141Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-07T17:45:31.320194Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320245Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320318Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 
2026-01-07T17:45:31.320388Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:31.320421Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2026-01-07T17:45:31.320451Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320475Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320522Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:31.320553Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:31.320587Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2026-01-07T17:45:31.320625Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320650Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320688Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:31.320723Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:31.320748Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2026-01-07T17:45:31.320775Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320798Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:31.320832Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:31.321118Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:31.321243Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:955:2809];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:31.321349Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:959:2813];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 2026-01-07T17:45:32.338742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:45:32.338823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] >> ColumnShardTiers::TTLUsage [GOOD] |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> ActorPage::NoUseAuthOk [GOOD] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:34.841785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:34.841867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:34.841894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:34.841920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:34.841945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:34.841965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:34.842029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:34.842099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:45:34.842811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:34.843027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:34.928249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:34.928332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:34.943594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:34.943924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:34.944174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:34.951012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:34.951385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:34.952083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:34.952339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:34.955182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:34.955377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:34.956664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:34.956727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:34.956852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:34.956908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:34.956978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:34.957184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:35.110891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.111924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.112958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.113032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t TxId: 102 at schemeshard: 72057594046678944 2026-01-07T17:45:36.314619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:882:2837] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-07T17:45:36.315017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:127:2151], Recipient [1:127:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:45:36.315060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:45:36.315126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:45:36.315182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-07T17:45:36.315219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:36.315260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-07T17:45:36.315310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-07T17:45:36.315391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-07T17:45:36.315433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-07T17:45:36.317364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:36.317423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-07T17:45:36.317557Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:127:2151], Recipient [1:127:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:45:36.317593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:45:36.317651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:45:36.317710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:45:36.317805Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:36.317854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:36.317887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:36.317919Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:36.317970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:36.318008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:45:36.318067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:784:2750] message: TxId: 102 2026-01-07T17:45:36.318116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:36.318156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:45:36.318188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:45:36.318333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:45:36.318384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:45:36.320289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:36.320405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:784:2750] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-01-07T17:45:36.320568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:45:36.320616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:911:2858] 2026-01-07T17:45:36.320832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:913:2860], Recipient [1:127:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:45:36.320866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:45:36.320891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-07T17:45:36.321254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:980:2925], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-07T17:45:36.321304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:36.321388Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:36.321621Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy" took 181us result status StatusSuccess 2026-01-07T17:45:36.322100Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy" PathDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableCopy" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:36.322634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:981:2926], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-07T17:45:36.322678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:36.322788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:36.322999Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex" took 173us result status StatusPathDoesNotExist 2026-01-07T17:45:36.323178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TableCopy/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/TableCopy\' (id: [OwnerId: 72057594046678944, LocalPathId: 41])" Path: "/MyRoot/TableCopy/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/TableCopy" LastExistedPrefixPathId: 41 LastExistedPrefixDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:34.987667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:34.987829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:34.987870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:34.987912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:34.987950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:34.987983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:34.988051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:34.988125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:34.989143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:34.989491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:35.066292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:35.066365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:35.083218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:35.083809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:35.084052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:35.089605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:35.090037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:35.090854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:35.091151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:35.094258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.094482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:35.095870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:35.095943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.096106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:35.096175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-07T17:45:35.096232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:35.096514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:35.235321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.236598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.236721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.236793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.236871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.236958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:45:35.237127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.237738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... s: true ShowPrivateTable: true } 2026-01-07T17:45:37.049495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:37.049631Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:37.049964Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex" took 346us result status StatusSuccess 2026-01-07T17:45:37.050984Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Index impl table name: indexImplTable 2026-01-07T17:45:37.051628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1073:3006], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:37.051677Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:37.051799Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:37.052154Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" took 328us result status StatusSuccess 2026-01-07T17:45:37.053107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:35.133700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:35.133807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:35.133842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:35.133877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:35.133912Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:35.133940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:35.133993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:35.134066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:35.134975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:35.135216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:35.223756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:35.223817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:35.238991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:35.239299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:35.239476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:35.255883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:35.256389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:35.257173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:35.257451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:35.263507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.263734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:35.264977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:35.265048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.265171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:35.265229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:35.265291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:45:35.265502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:35.412747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.413914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.414923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.415010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:37.387344Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 288us result status StatusSuccess 2026-01-07T17:45:37.388056Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 
7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:37.388643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1500:3323], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:37.388712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:37.388829Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:37.389133Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 313us result status StatusSuccess 2026-01-07T17:45:37.389975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: 
true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "19700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 49 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:27.016884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:27.085293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:27.097630Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:27.097956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:27.097995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:27.294730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:27.294830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:27.341314Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807805044245 != 1767807805044249 2026-01-07T17:43:27.351354Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:27.391536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:27.481815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:27.860892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:27.861871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:27.861904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:27.861931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:27.862095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:27.874686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:27.994281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-07T17:43:28.054255Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:28.055197Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:43:28.055492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:43:28.075189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:43:28.075454Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-07T17:43:28.080793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:43:28.081040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:43:28.081221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:43:28.081290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:43:28.081350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:43:28.081407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:43:28.081482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:43:28.081566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:43:28.081635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:43:28.081721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:43:28.081782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:43:28.081858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:43:28.081934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:43:28.096944Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:43:28.097524Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:911:2777], Recipient [1:955:2809]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:43:28.098773Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-01-07T17:43:28.099137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:43:28.099198Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:43:28.099349Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:43:28.099491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:43:28.099540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:43:28.099569Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:43:28.099628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:43:28.099670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:43:28.099696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:43:28.099713Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:43:28.099869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:43:28.099912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:43:28.099948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:43:28.099981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:43:28.100055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:43:28.100093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Ex ... 2811]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.298083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:45:35.298158Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2813]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.298192Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:959:2813];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP column0/uint64_value: 0 TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5027;count=35;size=14676;count=211;size=1515;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3740;count=21;size=6578;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5027;count=35;size=14746;count=212;size=1515;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3740;count=21;size=6578;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-07T17:45:35.481736Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.481844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:45:35.481978Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:955:2809]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.482027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:955:2809];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:45:35.482124Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.482167Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:45:35.482247Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2813]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.482286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:959:2813];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP TEvBlobStorage::TEvPut 
tId=72057594037968897;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1515;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3740;count=21;size=6578;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1515;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3740;count=21;size=6578;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14816;count=213;size=1515;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3740;count=21;size=6578;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-07T17:45:35.675277Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.675367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:45:35.675472Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:955:2809]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.675515Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:955:2809];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:45:35.675604Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.675641Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:45:35.675717Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2813]: NActors::TEvents::TEvWakeup 2026-01-07T17:45:35.675773Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:959:2813];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP REQUEST=SELECT COUNT(*) FROM `/Root/olapStore/olapTable`;EXPECTATION=1 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest 
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::NoUseAuthOk [GOOD] Test command err: 2026-01-07T17:45:20.402179Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677202110496433:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:20.402257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:20.451359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:21.037051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:21.105943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:21.106051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:21.156489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:21.293130Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677202110496398:2081] 1767807920397662 != 1767807920397665 2026-01-07T17:45:21.321553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:21.533142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:21.533715Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:21.565557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:21.565585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:21.565593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:21.565670Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:21.835280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:21.894642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:21.916201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:45:21.921011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:45:29.353940Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677243735521452:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:29.354319Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:29.373082Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:29.458559Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:29.469127Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677243735521418:2081] 1767807929350428 != 1767807929350431 2026-01-07T17:45:29.484085Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:29.484195Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:29.524532Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:29.601781Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:29.601815Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:29.601823Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:29.601932Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:29.613061Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:29.892319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:29.948316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:29.962588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:35.289288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:35.289400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:35.289438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:35.289477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:35.289520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:35.289552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:35.289636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:35.289713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:35.290710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:35.290999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:35.387041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:35.387107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:35.409991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:35.410433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:35.410666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:35.420748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:35.421304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:35.422071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:35.422359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:35.425583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.425799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:35.427181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:35.427254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:35.427399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:35.427463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:35.427525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:35.427790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:35.579451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.580565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.580715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.580805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.580878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:35.581797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 45 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:38.125836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1223:3133], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:38.125922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:38.126066Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:38.126415Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2" took 295us result status StatusSuccess 2026-01-07T17:45:38.127308Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2" PathDescription { Self { Name: "ValueIndex2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 
ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex2" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value2" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 
419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:38.127967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1224:3134], Recipient [1:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-07T17:45:38.128056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:38.128168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:38.128417Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2/indexImplTable" took 241us result status StatusSuccess 2026-01-07T17:45:38.128857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 47 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |92.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |92.0%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |92.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest 
|92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |92.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> TableWriter::Restore [GOOD] |92.2%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableWriter::Backup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |92.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> TTxDataShardMiniKQL::ReadSpecialColumns >> TTxDataShardMiniKQL::ReadConstant >> ColumnShardTiers::DSConfigs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |92.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TTxDataShardMiniKQL::Write |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::CrossShard_5_AllToAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64772, MsgBus: 9225 2026-01-07T17:45:19.210421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677197906072465:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:19.210476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:19.935385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:19.949753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-07T17:45:19.949890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:20.098182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:20.123786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:20.133124Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677197906072442:2081] 1767807919196922 != 1767807919196925 2026-01-07T17:45:20.407274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:20.408397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:20.408405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:20.408414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:20.408483Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:20.417294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:21.056573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:21.070957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:21.140327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.351114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.560733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:21.665647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:23.746720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215085943502:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.746816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.748086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677215085943512:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:23.749107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.083438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.134503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.170663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.213166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677197906072465:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:24.213237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:24.240398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.335789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.408520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.496569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.621041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:24.874811Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677219380911681:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.874889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.875322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677219380911686:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.875384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677219380911687:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.875632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:24.879130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:24.918962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677219380911690:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:45:25.029131Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677223675879037:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:28.467242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ification cookie mismatch for subscription [2:7592677255445808047:2081] 1767807932054151 != 1767807932054154 2026-01-07T17:45:32.211824Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:32.211922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:32.245467Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:32.350758Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:32.370724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:32.370751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:32.370760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:32.370848Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:33.077205Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:33.114466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:33.121768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:33.135515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:33.257931Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:33.425198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:33.571687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:35.865071Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677268330711802:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:35.865179Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:35.873089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677268330711812:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:35.873196Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.003709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.050930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.109016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.150057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.202629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.262995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.334886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.409406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:36.577116Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677272625679978:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.577266Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.577621Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677272625679983:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.577681Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677272625679984:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.577795Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:36.581954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:36.604188Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677272625679987:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:36.694683Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677272625680038:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:45:37.073214Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677255445808266:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:37.073375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:45:39.470074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:39.563681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:39.653761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TTxDataShardMiniKQL::WriteEraseRead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> BsControllerConfig::MoveGroups [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> IncrementalBackup::IncrementalBackupMultipleIndexes [GOOD] >> IncrementalBackup::IndexDataVerificationIncrementalRestore >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/backup/impl/ut_table_writer/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 |92.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2930:2117] 2026-01-07T17:45:06.894411Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:06.895550Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:06.895956Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:06.897828Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:06.898713Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:06.899032Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.899061Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:06.899355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:06.909359Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:06.909485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:06.909638Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:06.909740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.909838Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:06.909899Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-07T17:45:06.922503Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:06.922763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.965642Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:06.965812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.965902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:06.965990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.966128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:06.966205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.966242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:06.966297Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.977291Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:06.977471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.988345Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:06.988502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:06.989922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:06.989977Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:06.990185Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:06.990237Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:07.006347Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { 
BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:45:07.008020Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-07T17:45:07.008085Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-07T17:45:07.008116Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} 
Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-07T17:45:07.008146Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-07T17:45:07.008172Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-07T17:45:07.008198Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-07T17:45:07.008220Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-07T17:45:07.008258Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-07T17:45:07.008285Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-07T17:45:07.008308Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-07T17:45:07.008339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-07T17:45:07.008369Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-07T17:45:07.008417Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-07T17:45:07.008442Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-07T17:45:07.008465Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-07T17:45:07.008498Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-07T17:45:07.008522Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-07T17:45:07.008544Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-07T17:45:07.008565Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-07T17:45:07.008590Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-07T17:45:07.008610Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
8:1000 Path# /dev/disk1 2026-01-07T17:45:38.035220Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2026-01-07T17:45:38.035278Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2026-01-07T17:45:38.035326Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2026-01-07T17:45:38.035386Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2026-01-07T17:45:38.035435Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2026-01-07T17:45:38.035485Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2026-01-07T17:45:38.035540Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2026-01-07T17:45:38.035597Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2026-01-07T17:45:38.035659Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2026-01-07T17:45:38.035717Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2026-01-07T17:45:38.035801Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2026-01-07T17:45:38.035852Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2026-01-07T17:45:38.035897Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2026-01-07T17:45:38.035998Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2026-01-07T17:45:38.036057Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2026-01-07T17:45:38.036112Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2026-01-07T17:45:38.036167Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2026-01-07T17:45:38.036225Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2026-01-07T17:45:38.036279Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2026-01-07T17:45:38.036336Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2026-01-07T17:45:38.036390Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2026-01-07T17:45:38.036436Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2026-01-07T17:45:38.036465Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2026-01-07T17:45:38.036509Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
186:1000 Path# /dev/disk1 2026-01-07T17:45:38.036555Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2026-01-07T17:45:38.036601Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2026-01-07T17:45:38.036645Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2026-01-07T17:45:38.036699Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2026-01-07T17:45:38.036747Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2026-01-07T17:45:38.036791Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2026-01-07T17:45:38.036848Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2026-01-07T17:45:38.036901Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2026-01-07T17:45:38.036963Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2026-01-07T17:45:38.037025Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2026-01-07T17:45:38.037073Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2026-01-07T17:45:38.037117Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2026-01-07T17:45:38.037161Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2026-01-07T17:45:38.037215Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2026-01-07T17:45:38.037280Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2026-01-07T17:45:38.037333Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2026-01-07T17:45:38.037379Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2026-01-07T17:45:38.037414Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2026-01-07T17:45:38.037455Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2026-01-07T17:45:38.037501Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2026-01-07T17:45:38.037551Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2026-01-07T17:45:38.037600Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2026-01-07T17:45:38.037648Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2026-01-07T17:45:38.037705Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
194:1000 Path# /dev/disk1 2026-01-07T17:45:38.037750Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2026-01-07T17:45:38.037793Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2026-01-07T17:45:38.037838Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2026-01-07T17:45:38.037879Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2026-01-07T17:45:38.037923Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2026-01-07T17:45:38.037970Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2026-01-07T17:45:38.038017Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2026-01-07T17:45:38.038065Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2026-01-07T17:45:38.038120Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2026-01-07T17:45:38.038167Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2026-01-07T17:45:38.038223Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2026-01-07T17:45:38.038268Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2026-01-07T17:45:38.038312Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2026-01-07T17:45:38.038355Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2026-01-07T17:45:38.038403Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2026-01-07T17:45:38.038456Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2026-01-07T17:45:38.038498Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2026-01-07T17:45:38.038538Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2026-01-07T17:45:38.038582Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2026-01-07T17:45:38.038641Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2026-01-07T17:45:38.660895Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.640073s 2026-01-07T17:45:38.661127Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.640371s 2026-01-07T17:45:38.710432Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-07T17:45:38.817146Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2026-01-07T17:45:38.849411Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-07T17:45:38.958822Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2026-01-07T17:45:38.976844Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-07T17:45:39.092563Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2026-01-07T17:45:39.114653Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } |92.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:41.523574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:41.657556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:41.680766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:41.681372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:41.681436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:42.035376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:42.035553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:42.114139Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807818900111 != 1767807818900115 2026-01-07T17:43:42.127416Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:42.178286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:42.304274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:42.979603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:42.981343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:42.981415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:42.981456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:42.981704Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:42.996652Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-07T17:43:55.211140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:958:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.211337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.211942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:985:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.212043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.215363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:55.364445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1077:2904], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.364590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.365084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1081:2908], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.365157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.365311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1084:2911], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:55.370125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:55.506710Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1086:2913], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:43:55.908263Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1178:2978] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:57.996515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:58.692726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:59.367617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:00.131086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:00.563060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:01.648270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:01.943449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-07T17:44:06.840780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:44:06.840870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = 
`secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-07T17:44:17.647217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", ... 1-07T17:45:32.147443Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:32.147480Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-07T17:45:32.147506Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2026-01-07T17:45:32.147530Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2026-01-07T17:45:32.147555Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2026-01-07T17:45:32.147586Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 72075186224037894 2026-01-07T17:45:32.147611Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:32.147649Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:32.147996Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3116:4477];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:32.148121Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3120:4479];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-07T17:45:32.148195Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3130:4487];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-07T17:45:43.406175Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.406266Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.406305Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.406542Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.406768Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.406826Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-07T17:45:43.406884Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.406924Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.406988Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.407028Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.407053Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-07T17:45:43.407080Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407103Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407135Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.407163Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.407185Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-07T17:45:43.407207Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407227Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407258Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.407338Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.407361Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:43.407385Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407405Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.407433Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.407733Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.407948Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-07T17:45:43.408268Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.408299Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:43.408337Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.408385Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.408655Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-07T17:45:43.408679Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-07T17:45:43.408704Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.408728Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-07T17:45:43.408759Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-07T17:45:43.409151Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3116:4477];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:43.409262Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3120:4479];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-07T17:45:43.409457Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3130:4487];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 
0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |92.3%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2930:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2930:2117] 2026-01-07T17:45:05.583869Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:05.585081Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:05.585530Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T17:45:05.587617Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:05.588560Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T17:45:05.588922Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.589102Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T17:45:05.589439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T17:45:05.599232Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T17:45:05.599365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T17:45:05.599541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T17:45:05.599667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.599759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T17:45:05.599830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-07T17:45:05.611929Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T17:45:05.612085Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.656361Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T17:45:05.656533Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.656618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T17:45:05.656699Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.656844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T17:45:05.656909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.656986Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T17:45:05.657063Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.667926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T17:45:05.668095Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.681607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T17:45:05.681768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T17:45:05.683298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T17:45:05.683351Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T17:45:05.683557Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T17:45:05.683628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T17:45:05.701670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } 
Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2026-01-07T17:45:05.702564Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-07T17:45:05.702660Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-07T17:45:05.702686Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-07T17:45:05.702711Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-07T17:45:05.702739Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-07T17:45:05.702765Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-07T17:45:05.702788Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-07T17:45:05.702831Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-07T17:45:05.702859Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-07T17:45:05.702895Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-07T17:45:05.702939Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-07T17:45:05.702965Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-07T17:45:05.702989Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-07T17:45:05.703011Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-07T17:45:05.703036Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-07T17:45:05.703074Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-07T17:45:05.703110Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-07T17:45:05.703166Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-07T17:45:05.703192Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-07T17:45:05.703218Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-07T17:45:05.703255Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-07T17:45:05.703281Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-07T17:45:05.703306Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-07T17:45:05.703345Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-07T17:45:05.703370Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-07T17:45:05.703406Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-07T17:45:05.703431Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-07T17:45:05.703460Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-07T17:45:05.703487Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-07T17:45:05.703528Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2026-01-07T17:45:05.703563Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2026-01-07T17:45:05.703596Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2026-01-07T17:45:05.703620Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Cr ... R NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2026-01-07T17:45:37.881970Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2026-01-07T17:45:37.881999Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2026-01-07T17:45:37.882042Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2026-01-07T17:45:37.882071Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2026-01-07T17:45:37.882101Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2026-01-07T17:45:37.882130Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2026-01-07T17:45:37.882159Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2026-01-07T17:45:37.882189Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2026-01-07T17:45:37.882217Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2026-01-07T17:45:37.882249Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2026-01-07T17:45:37.882278Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2026-01-07T17:45:37.882309Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2026-01-07T17:45:37.882339Z node 161 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2026-01-07T17:45:37.882371Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2026-01-07T17:45:37.882402Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2026-01-07T17:45:37.882431Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2026-01-07T17:45:37.882462Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2026-01-07T17:45:37.882491Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2026-01-07T17:45:37.882522Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2026-01-07T17:45:37.882553Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2026-01-07T17:45:38.266417Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.391223s 2026-01-07T17:45:38.266646Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.391487s 2026-01-07T17:45:38.339772Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key 
{ Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2026-01-07T17:45:38.342060Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2026-01-07T17:45:38.342142Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2026-01-07T17:45:38.342195Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2026-01-07T17:45:38.342230Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2026-01-07T17:45:38.342260Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2026-01-07T17:45:38.342285Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2026-01-07T17:45:38.342326Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2026-01-07T17:45:38.342354Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2026-01-07T17:45:38.342381Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2026-01-07T17:45:38.342404Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
214:1000 Path# /dev/disk1 2026-01-07T17:45:38.342431Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2026-01-07T17:45:38.342457Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2026-01-07T17:45:38.342497Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2026-01-07T17:45:38.342526Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2026-01-07T17:45:38.342567Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2026-01-07T17:45:38.342610Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2026-01-07T17:45:38.342641Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2026-01-07T17:45:38.342667Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2026-01-07T17:45:38.342695Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2026-01-07T17:45:38.342722Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2026-01-07T17:45:38.342750Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2026-01-07T17:45:38.342775Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2026-01-07T17:45:38.342802Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2026-01-07T17:45:38.342828Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2026-01-07T17:45:38.342855Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2026-01-07T17:45:38.342883Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2026-01-07T17:45:38.342909Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2026-01-07T17:45:38.342955Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2026-01-07T17:45:38.343005Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2026-01-07T17:45:38.343031Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] |92.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2026-01-07T17:45:46.006054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.006117Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.006580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.019642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.020054Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.020347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.031980Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.080616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.081248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.083176Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.083267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.083380Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.083739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.083972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.084057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:45:46.169711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.205779Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.206012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.206122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.206178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.206227Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.206288Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.206450Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.206508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.206821Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.206922Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.207059Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.207119Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.207166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.207202Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.207265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.207308Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.207359Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.207456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.207504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.207546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 
2026-01-07T17:45:46.217201Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.217288Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.217394Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.217629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.217712Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.217777Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.217830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.217870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.217922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.217964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.218298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.218335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.218374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.218406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.218466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.218493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.218529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.218560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.218604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.243597Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.243682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.243721Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.243769Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.243836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.244436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.244496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.244540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.244652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.244679Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.244810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.244850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:45:46.244880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.244942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.247971Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.248037Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.248241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.248274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.248327Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.248372Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.248411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.248457Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46 ... 
ine.cpp:1937: Add [0:7] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:45:49.076665Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:45:49.076708Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:49.076763Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 9437184 2026-01-07T17:45:49.076798Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-07T17:45:49.076819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:45:49.076843Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:45:49.076872Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BlockFailPoint 2026-01-07T17:45:49.076895Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-07T17:45:49.076916Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:45:49.076959Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:45:49.076981Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:49.077524Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:7] at tablet 9437184 with status COMPLETE 2026-01-07T17:45:49.077589Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:7] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 3, SelectRangeBytes: 46, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:45:49.077648Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-07T17:45:49.077678Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:45:49.077704Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:49.077731Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.077769Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 7 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:45:49.077840Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is DelayComplete 2026-01-07T17:45:49.077871Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:49.077906Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit 
CompletedOperations 2026-01-07T17:45:49.077941Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit CompletedOperations 2026-01-07T17:45:49.077982Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-07T17:45:49.078005Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:49.078035Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 9437184 has finished 2026-01-07T17:45:49.078098Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:49.078138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.078180Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:49.081846Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-07T17:45:49.081919Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-07T17:45:49.082239Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:310:2291], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:49.082286Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:49.082338Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:309:2290], serverId# [3:310:2291], sessionId# [0:0:0] 2026-01-07T17:45:49.082580Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\342\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\002\203\004\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?6\003?4e\005\001\013?:\003?8m\005\001\003?<\002\003?>\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\ 2026-01-07T17:45:49.082621Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:49.082710Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:49.083280Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2026-01-07T17:45:49.083362Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-07T17:45:49.083396Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2026-01-07T17:45:49.083426Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:45:49.083450Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:45:49.083489Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:49.083543Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 9437184 2026-01-07T17:45:49.083579Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-07T17:45:49.083604Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:45:49.083628Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:45:49.083651Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BlockFailPoint 2026-01-07T17:45:49.083674Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-07T17:45:49.083696Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:45:49.083717Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:45:49.083737Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:49.084357Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2026-01-07T17:45:49.084429Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:45:49.084498Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-07T17:45:49.084529Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:45:49.084556Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 
9437184 to execution unit FinishPropose 2026-01-07T17:45:49.084583Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.084622Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:45:49.084686Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is DelayComplete 2026-01-07T17:45:49.084716Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:49.084752Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit CompletedOperations 2026-01-07T17:45:49.084788Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2026-01-07T17:45:49.084829Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-07T17:45:49.084855Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:49.084885Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 9437184 has finished 2026-01-07T17:45:49.089039Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:49.089128Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:8] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.089193Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:21.118366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:21.118453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:21.118506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:21.118545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:21.118578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:21.118607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:21.118659Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:21.118745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:21.119538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:21.119815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:21.240374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:21.240459Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:21.257122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:21.257524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:21.257725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:21.264425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:21.264812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:21.265675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:21.265966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:21.268707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:21.268914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:21.270085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:21.270143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:21.270260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:21.270320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:21.270378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:21.270564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:21.420445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.421965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:21.422434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
5:48.443810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-07T17:45:48.444087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.444185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.444522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.444612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.444866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.445311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.445374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.445454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.445619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.445700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.446979Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:48.453348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:48.453513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:48.457689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, 
Sender [1:2151:4074], Recipient [1:2151:4074]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:48.457747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:48.460023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:48.460103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:48.460560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:2151:4074], Recipient [1:2151:4074]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:48.460620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:48.462185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:48.462250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:48.462308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:48.462343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:48.463207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:2189:4074], Recipient [1:2151:4074]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:48.463260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:48.463295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2151:4074] sender: [1:2210:2058] recipient: [1:15:2062] 2026-01-07T17:45:48.521426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:2209:4121], Recipient [1:2151:4074]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:48.521504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:48.521640Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:48.521971Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 299us result status StatusSuccess 2026-01-07T17:45:48.522802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { 
} ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27450 Memory: 156864 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> Other::TraceHttpOk >> TExternalTableTest::DropExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2026-01-07T17:45:46.424565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.424644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.425539Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.440278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.440678Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.440991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.453375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.536797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.537652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.545701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.545815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.545897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.546399Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.546709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.546809Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:45:46.665709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.758493Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.758771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.758920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.758977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.759027Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.759081Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.759482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.759580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.760202Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.760334Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.760508Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.760562Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.760619Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.760660Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.760811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.760852Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.760901Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.761028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.761088Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.761139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:45:46.779179Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 
4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.784248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.784460Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.784716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.784821Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.784905Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.787463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.787539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.787585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.787627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.788204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.788258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.788302Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.788359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.788435Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.788464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.790136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.790203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.790241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.807048Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.807126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.807170Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.807228Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 
2026-01-07T17:45:46.807310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.807904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.808017Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.808073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.808270Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.808331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.808493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.808547Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:45:46.808626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.808678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.812763Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.812853Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.813834Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.813902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.813971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.814249Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.814300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.814356Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46.814403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
26-01-07T17:45:49.150572Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-07T17:45:49.150685Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-07T17:45:49.150971Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:238:2230], Recipient [3:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:49.151023Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:49.151363Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:49.151469Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:49.151617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 238 RawX2: 12884904118 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-07T17:45:49.151707Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [3:26:2073], Recipient [3:238:2230]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-07T17:45:49.151744Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-07T17:45:49.151781Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-07T17:45:49.151827Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:49.151946Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [3:26:2073], Recipient [3:238:2230]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-07T17:45:49.151983Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-07T17:45:49.152030Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-07T17:45:49.152105Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:49.152149Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:49.152193Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:49.152234Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:49.152276Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:49.152314Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:49.152366Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:49.152465Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [3:284:2267], Recipient [3:238:2230]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:288:2271] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T17:45:49.152500Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T17:45:49.152586Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [3:128:2153], Recipient [3:238:2230]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-07T17:45:49.152625Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-07T17:45:49.152671Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-07T17:45:49.152731Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-07T17:45:49.165404Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [3:284:2267], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:284:2267] ServerId: [3:288:2271] } 2026-01-07T17:45:49.165476Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T17:45:49.200713Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-07T17:45:49.200894Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-07T17:45:49.201276Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:294:2275], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:49.201320Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:49.201372Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:292:2274], serverId# [3:294:2275], sessionId# [0:0:0] 2026-01-07T17:45:49.201667Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? 
p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-07T17:45:49.201711Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:49.201809Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:49.202576Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-07T17:45:49.202681Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:49.202723Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-07T17:45:49.202768Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:45:49.202805Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:45:49.202852Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:45:49.202910Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-07T17:45:49.202952Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:49.202997Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:45:49.203023Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:45:49.203048Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-01-07T17:45:49.203085Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:49.203107Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:45:49.203130Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:45:49.203157Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:49.203609Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-07T17:45:49.203679Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:45:49.203749Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:49.203777Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 
2026-01-07T17:45:49.203803Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:49.203830Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.203869Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:45:49.203958Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayComplete 2026-01-07T17:45:49.203989Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:49.204028Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-07T17:45:49.204067Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-07T17:45:49.204116Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:49.204140Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:49.204168Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-07T17:45:49.204239Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:49.204280Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-07T17:45:49.204325Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> TExternalTableTest::DropTableTwice >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TExternalTableTest::CreateExternalTable |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |92.3%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> TExternalTableTest::ReadOnlyMode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> 
TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2026-01-07T17:45:46.124080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.124171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.124877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.138814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.139269Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.139608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.199796Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.249127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.249830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.251647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.251717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.251780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.252220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.252421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.252502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:45:46.367466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.462224Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.462456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.462562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.462614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.462651Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.462695Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.462888Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.462942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.463272Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: 
TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.463401Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.463538Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.463578Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.463625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.463660Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.463702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.463735Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.463783Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.463884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.463941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.463982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:45:46.484199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.484290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.484418Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.484593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.484695Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.484803Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.484864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.484907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.487428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.487498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.488037Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.488090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.488132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.488170Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.488250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.488279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.488328Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.488360Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.488390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.502282Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.502375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.502415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.502474Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.502550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.503135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.503189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.503234Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.503378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.503411Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.503581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.503634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-07T17:45:46.503675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.503721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.523217Z node 1 
:TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.523311Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.523565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.523650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.523710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.523753Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.523788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.523842Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46.523901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-07T17:45:46. ... :59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-07T17:45:50.313987Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-07T17:45:50.314135Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-07T17:45:50.314237Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-07T17:45:50.314459Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:50.314500Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:50.315568Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437184 restored its data 2026-01-07T17:45:50.316476Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [6:5] at 9437184 exceeded memory limit 10778022 and requests 86224176 more for the next try 2026-01-07T17:45:50.316626Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 5 released its data 2026-01-07T17:45:50.316678Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is Restart 2026-01-07T17:45:50.316712Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:45:50.316751Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:50.316821Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 9437184 out-of-order limits exceeded 2026-01-07T17:45:50.316865Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:50.316978Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, 
NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:45:50.317036Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} pin 0 (0 b) load 0 (0 b) 2026-01-07T17:45:50.317101Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2026-01-07T17:45:50.317141Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} released on update Res{3 10778022b}, Memory{0 dyn 0} 2026-01-07T17:45:50.317205Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 97002198b} type transaction 2026-01-07T17:45:50.317338Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) (release resources {0, 97002198}) 2026-01-07T17:45:50.317402Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 18.820903 to 1.882090 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314])) 2026-01-07T17:45:50.317533Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) (priority=5 type=transaction resources={0, 97002198} resubmit=1) 2026-01-07T17:45:50.317584Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) to queue queue_transaction 2026-01-07T17:45:50.317639Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 97002198} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) from queue queue_transaction 2026-01-07T17:45:50.317684Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) to queue queue_transaction 2026-01-07T17:45:50.317722Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_transaction from 0.000000 to 15.244932 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230])) 2026-01-07T17:45:50.317803Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 97002198b}, Memory{0 dyn 97002198} 2026-01-07T17:45:50.317863Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:50.317911Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:50.318785Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437184 restored its data 2026-01-07T17:45:50.588957Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2026-01-07T17:45:50.589069Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, 
SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:45:50.589155Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:50.589199Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:45:50.589238Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompleteOperation 2026-01-07T17:45:50.589276Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2026-01-07T17:45:50.589548Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is DelayComplete 2026-01-07T17:45:50.589585Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2026-01-07T17:45:50.589646Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompletedOperations 2026-01-07T17:45:50.589683Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2026-01-07T17:45:50.589723Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is Executed 2026-01-07T17:45:50.589750Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:50.589785Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437184 has finished 2026-01-07T17:45:50.589824Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:50.589854Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:50.589889Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:50.589929Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:50.590059Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-07T17:45:50.590129Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-07T17:45:50.590327Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) (release resources {0, 97002198}) 2026-01-07T17:45:50.590389Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 15.244932 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230])) 2026-01-07T17:45:50.606061Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2026-01-07T17:45:50.606151Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:45:50.606202Z node 3 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2026-01-07T17:45:50.606284Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:45:50.606387Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-07T17:45:50.606456Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:45:50.606787Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2026-01-07T17:45:50.606831Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:50.606861Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2026-01-07T17:45:50.606908Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:45:50.606961Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-07T17:45:50.606994Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:50.607243Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:347:2314], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-07T17:45:50.607298Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:45:50.607357Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2026-01-07T17:45:50.607461Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:238:2230], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-07T17:45:50.607494Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:45:50.607541Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> 
TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:49.562132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:49.562230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.562273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:49.562307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:49.562349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:49.562377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:49.562429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.562500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:49.563279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:49.563527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:49.653998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:49.654061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:49.674921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:49.675279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:49.675456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:49.682880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:49.683270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:49.684050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:49.684329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:49.687056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.687280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:49.688555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:49.688615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.688754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:49.688803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:49.688844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:49.689054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:49.839995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.840892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841211Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.841910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard: 72057594046678944 2026-01-07T17:45:51.161527Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2026-01-07T17:45:51.161645Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-07T17:45:51.162521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:51.164971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:45:51.165066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:51.165463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:45:51.165612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:45:51.165639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:51.166843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:45:51.167022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:45:51.167225Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [1:1309:3157], Recipient [1:691:2677]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1309:3157] ServerId: [1:1311:3159] } 2026-01-07T17:45:51.167287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T17:45:51.167319Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:45:51.167721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:45:51.167768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:45:51.168329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1325:3173], Recipient [1:691:2677]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:51.168382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:51.168429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-07T17:45:51.168583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:918:2851], Recipient [1:691:2677]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2026-01-07T17:45:51.168622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:45:51.168700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:45:51.168799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:45:51.168835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1323:3171] 2026-01-07T17:45:51.169068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:1325:3173], Recipient [1:691:2677]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:45:51.169102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T17:45:51.169154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-07T17:45:51.169766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1326:3174], Recipient [1:691:2677]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:45:51.169834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:51.169944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:51.170261Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 265us result status StatusSuccess 2026-01-07T17:45:51.170750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:51.171671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:1327:3175], Recipient [1:691:2677]: NKikimrPQ.TEvPeriodicTopicStats PathId: 40 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2026-01-07T17:45:51.171727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-07T17:45:51.171770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 40] DataSize 151 UsedReserveSize 151 2026-01-07T17:45:51.171806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-07T17:45:51.172269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1328:3176], Recipient [1:691:2677]: NKikimrSchemeOp.TDescribePath Path: 
"/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:45:51.172329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:51.172440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:51.174891Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 200us result status StatusSuccess 2026-01-07T17:45:51.175410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TExternalTableTest::SchemeErrors >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:48.115439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:48.115579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:48.115638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:48.115687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:48.115731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:48.115764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:48.115850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:48.115971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:48.116891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:48.117324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:48.205951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:48.206019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:48.224606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:48.225057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:48.225252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:48.233618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:48.234080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:48.234905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:48.235227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:48.239984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:48.240216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:48.241548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:48.241629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:48.241769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:48.241819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:48.241868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:48.242037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.420008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:45:48.421729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.421991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:48.422570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... oup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:49.704142Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:941:2862] connected; active server actors: 1 2026-01-07T17:45:49.727767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:49.728112Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 323us 
result status StatusSuccess 2026-01-07T17:45:49.728674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:50.362317Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-07T17:45:50.362436Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 1 2026-01-07T17:45:50.363459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 0 2026-01-07T17:45:50.363575Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-07T17:45:50.363875Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2026-01-07T17:45:50.363967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-07T17:45:50.381325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:45:50.828811Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-07T17:45:50.828885Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2026-01-07T17:45:50.829711Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2026-01-07T17:45:50.829763Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-01-07T17:45:50.829973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-07T17:45:50.843195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:45:51.257215Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-07T17:45:51.257291Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-07T17:45:51.258028Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2026-01-07T17:45:51.258066Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-07T17:45:51.258355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-07T17:45:51.282257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:45:51.328447Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:51.328687Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 282us result status StatusSuccess 2026-01-07T17:45:51.329240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:51.330068Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1015:2927] connected; active server actors: 1 2026-01-07T17:45:51.345958Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186233409547][Topic1] BALANCER INIT DONE for 
Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2026-01-07T17:45:51.346626Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-07T17:45:51.348175Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2026-01-07T17:45:51.378065Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1059:2961] connected; active server actors: 1 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> TExternalTableTest::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:50.788841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.788956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.789014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.789061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.789096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.789125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.789187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.789254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.790177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.790438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.933171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:50.933245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.934225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.946456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.947265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.947480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.970343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.970671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.971417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.971735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.976386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.976569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.977794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.977865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.978057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.978114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.978242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.978427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:51.105051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.106944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.107045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.107134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.107294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.107376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
tate 2026-01-07T17:45:51.840064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:51.840098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:51.840133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:45:51.840174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:51.840211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:45:51.840252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:45:51.840286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:45:51.840322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:45:51.840395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:45:51.840435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:45:51.840484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:45:51.840513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:45:51.841558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:51.841638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:51.841668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:45:51.841720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:45:51.841758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:45:51.851010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:51.851129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:51.851171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:45:51.851205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:45:51.851237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:45:51.851332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:45:51.857240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:45:51.857898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:45:51.858169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:45:51.858219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:45:51.858644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:45:51.858768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:45:51.858850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:748:2737] TestWaitNotification: OK eventTxId 102 2026-01-07T17:45:51.859367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:51.859622Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 235us result status StatusSuccess 2026-01-07T17:45:51.860063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-07T17:45:51.863597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:51.863969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-07T17:45:51.864087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2026-01-07T17:45:51.864225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2026-01-07T17:45:51.867674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:45:51.867904Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:45:51.868274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:45:51.868341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:45:51.868784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:45:51.868866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:45:51.868898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:756:2745] TestWaitNotification: OK eventTxId 103 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2026-01-07T17:45:46.612544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.612604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.613155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.626791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.627189Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.627487Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.639836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.689529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.690201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.692287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.692408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.692487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.692912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.693201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.693283Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 
9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:45:46.771220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.815929Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.816147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.816256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.816316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.816357Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.816422Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.816604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.816658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.817207Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.817325Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.817459Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.817499Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.817546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.817583Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.817654Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.817691Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.817735Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.817847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.817892Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.817939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:45:46.831249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 
8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.831330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.831442Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.831686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.831760Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.831818Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.831891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.832018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.832058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.832099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.832447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.832489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.832538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.832580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.832639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.832677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.832713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.832746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.832777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.850862Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.850940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.850978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.851077Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.851156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not 
sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.851673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.851728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.851793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.851964Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.852016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.852239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.852295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:45:46.852335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.852370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.856069Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.856142Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.856394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.856435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.856508Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.856551Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.856587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.856633Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46.856669Z node 1 :TX_DATASHARD TRACE: dat ... 
[0:2] at 9437184 on unit FinishPropose 2026-01-07T17:45:50.656094Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:50.656127Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:50.656164Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-07T17:45:50.656204Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-07T17:45:50.656255Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:45:50.656285Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:50.656320Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-07T17:45:50.705519Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:50.705580Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-07T17:45:50.705624Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2026-01-07T17:45:50.705710Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:51.351473Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-07T17:45:51.351559Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-07T17:45:51.351971Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:305:2285], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:51.352003Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:51.352055Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:304:2284], serverId# [3:305:2285], sessionId# [0:0:0] 2026-01-07T17:45:51.548030Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2026-01-07T17:45:51.550940Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:51.551130Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:51.614307Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2026-01-07T17:45:51.614442Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-07T17:45:51.614510Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2026-01-07T17:45:51.614571Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:45:51.614617Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:45:51.614677Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:51.614758Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 9437184 2026-01-07T17:45:51.614816Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-07T17:45:51.614844Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:45:51.614872Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:45:51.614899Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BlockFailPoint 2026-01-07T17:45:51.614926Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-07T17:45:51.614951Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:45:51.614976Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:45:51.615003Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:51.615063Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:51.615135Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:3] at 9437184 requested 
46269686 more memory 2026-01-07T17:45:51.615179Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-07T17:45:51.615342Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:51.615402Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:51.615469Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:51.641528Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:3] at 9437184 exceeded memory limit 50463990 and requests 403711920 more for the next try 2026-01-07T17:45:51.644743Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 3 released its data 2026-01-07T17:45:51.644851Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-07T17:45:51.645262Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:51.645307Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-07T17:45:51.705373Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 3 at 9437184 restored its data 2026-01-07T17:45:51.705500Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:45:51.798789Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2026-01-07T17:45:51.798870Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:45:51.798940Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:51.798970Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:45:51.799004Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:51.799037Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit FinishPropose 2026-01-07T17:45:51.799069Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is DelayComplete 2026-01-07T17:45:51.799093Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:51.799125Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit CompletedOperations 2026-01-07T17:45:51.799151Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit 
CompletedOperations 2026-01-07T17:45:51.799189Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-07T17:45:51.799208Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2026-01-07T17:45:51.799243Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 9437184 has finished 2026-01-07T17:45:51.912765Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:51.912847Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:3] at 9437184 on unit FinishPropose 2026-01-07T17:45:51.912904Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 8 ms, status: COMPLETE 2026-01-07T17:45:51.913033Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:51.949730Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:45:51.949795Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-07T17:45:51.957355Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:238:2231], Recipient [3:240:2232]: NKikimr::TEvTablet::TEvFollowerGcApplied >> TExternalTableTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:50.459454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.459557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.459619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.459664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.459703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.459733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.459816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-07T17:45:50.459890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.460843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.461188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.558795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:50.558860Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.586378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.586833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.587038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.594414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.594842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.595642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.595980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.598960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.599185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.600484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.600565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.600709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.600756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.600806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.601018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:50.770240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.771954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.772016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.772109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.772205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7:45:52.083114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.083307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.083374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.083455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.083668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.083773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.084406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.084724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.084821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.084904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.085071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.085136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.085226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.085583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:52.090527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:52.090707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:52.093715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:925:2855], Recipient [1:925:2855]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:52.093774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:52.095019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:52.095093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.095290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:52.095350Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:52.095400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:52.095438Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:52.096779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:961:2855], Recipient [1:925:2855]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:52.096820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:52.096864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:925:2855] sender: [1:983:2058] recipient: [1:15:2062] 2026-01-07T17:45:52.146726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:982:2900], Recipient [1:925:2855]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:45:52.146785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:52.146876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:52.147098Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 173us result status StatusSuccess 2026-01-07T17:45:52.147474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:52.148134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:984:2901], Recipient [1:925:2855]: NKikimrPQ.TEvPeriodicTopicStats PathId: 38 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2026-01-07T17:45:52.148192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-07T17:45:52.148232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 19 UsedReserveSize 7 2026-01-07T17:45:52.148263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-07T17:45:52.148313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:141: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2026-01-07T17:45:52.148519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:985:2902], Recipient [1:925:2855]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-07T17:45:52.148544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:52.148608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:52.148750Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 136us result status StatusSuccess 2026-01-07T17:45:52.149117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 
WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:50.797175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.797267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.797298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.797328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.797355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.797381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.797460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.797523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.798247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.798505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.928695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:50.928774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.929668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.941916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.942694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.942875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.954710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.954999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.955739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.956164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.960717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.960898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.962080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.962138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.962326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.962374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.962476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.962640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:51.137531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.138999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.139733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
T_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.103964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.104120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 115, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.104217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.104317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.104429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.104490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.104730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.104852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-07T17:45:52.104992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-07T17:45:52.105208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for 
txId 115: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-07T17:45:52.105668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-07T17:45:52.105728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.105898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.105949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-07T17:45:52.106045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-07T17:45:52.106364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-07T17:45:52.106564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.106904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.106982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.107003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.107160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.107192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:831:2820] 2026-01-07T17:45:52.107295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.107316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:831:2820] TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2026-01-07T17:45:52.111071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:52.111357Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 314us result status StatusSuccess 2026-01-07T17:45:52.111807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:50.513433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.513532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.513572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.513615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.513663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.513720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.513794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.513875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.514853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.515193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.637793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:50.637893Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.638729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.653522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.654448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.654626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.665638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.665877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.666655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.666988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.671494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.671669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.672789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.672849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.673036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.673093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.673198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.673361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:50.812030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.812813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.812897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.812968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.813711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
8944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:45:52.709685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:45:52.709805Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.709838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:45:52.709887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-07T17:45:52.709918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-07T17:45:52.709944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:45:52.710015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.710073Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:45:52.710185Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:45:52.710221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:45:52.710263Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:45:52.710295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:45:52.710337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:45:52.710381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:45:52.710423Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:45:52.710461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:45:52.710556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:45:52.710608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:45:52.710648Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 3, subscribers: 0 
2026-01-07T17:45:52.710683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:45:52.710715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:45:52.710737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:45:52.712117Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.712213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.712251Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:45:52.712316Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:45:52.712601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:45:52.714367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.714456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.714498Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:45:52.714545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:45:52.714585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:45:52.715505Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.715582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:52.715610Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:45:52.715639Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:45:52.715669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:45:52.715768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:45:52.718175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:45:52.719626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:45:52.720072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:45:52.720283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:45:52.720332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:45:52.720726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:45:52.720821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:45:52.720860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:747:2736] TestWaitNotification: OK eventTxId 101 2026-01-07T17:45:52.721335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:52.721542Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 255us result status StatusSuccess 2026-01-07T17:45:52.721923Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:50.725168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.725261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.725302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.725343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.725379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.725443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.725511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.725574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:45:50.726385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.726655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.857974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:50.858060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.858916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.871660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.872492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.872667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.883736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.884056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.884837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.885207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.889232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.889386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.890307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.890374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.890572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.890632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.890757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.890931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:51.048425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.049524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.049681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.049764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.049861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.049942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.050807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
hildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.020252Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.020497Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 266us result status StatusSuccess 2026-01-07T17:45:53.020950Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.021940Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.022123Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 202us result status StatusSuccess 2026-01-07T17:45:53.022598Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.023308Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.023526Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 267us result status StatusSuccess 2026-01-07T17:45:53.023956Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.024620Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.024831Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 230us result status StatusSuccess 2026-01-07T17:45:53.025343Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 
1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:51.532187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:51.532271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:51.532312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:51.532355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:51.532392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:51.532433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-07T17:45:51.532519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:51.532580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:51.533306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:51.533538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:51.649021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:51.649110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:51.650013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:51.661927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:51.662706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:51.662921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:51.673005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:51.673291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:51.674099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:51.674428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:51.678781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:51.678995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:51.680221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:51.680291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:51.680489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:51.680545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:51.680635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:51.680752Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:51.826816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.827939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.828854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.829013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.829104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
schemeshard: 72057594046678944 2026-01-07T17:45:52.868947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-07T17:45:52.869066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:45:52.869679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.869786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.869832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:52.869872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-07T17:45:52.869933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-07T17:45:52.870462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.870570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.870604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:52.870631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-07T17:45:52.870658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:45:52.870739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-07T17:45:52.873183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-07T17:45:52.873347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 
FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-07T17:45:52.874877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:45:52.875015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:45:52.875280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:52.875403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:52.875456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-07T17:45:52.875631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-07T17:45:52.875808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:45:52.875884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 FAKE_COORDINATOR: Erasing txId 129 2026-01-07T17:45:52.878811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:52.878852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:52.879044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:45:52.879132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.879168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:896:2853], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-07T17:45:52.879262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:896:2853], at schemeshard: 72057594046678944, txId: 129, path id: 41 2026-01-07T17:45:52.879556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-07T17:45:52.879609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-07T17:45:52.879717Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:45:52.879752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:52.879799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-07T17:45:52.879850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:52.879891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-07T17:45:52.879933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-07T17:45:52.879992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-07T17:45:52.880031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-07T17:45:52.880126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:45:52.880170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-07T17:45:52.880208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:45:52.880238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-07T17:45:52.881102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.881205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.881250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:52.881307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:45:52.881354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-07T17:45:52.882399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.882516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-07T17:45:52.882558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-07T17:45:52.882591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-07T17:45:52.882634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:45:52.882725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-07T17:45:52.885935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-07T17:45:52.888452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:50.054533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.054641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.054692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.054725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.054758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.054788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.054858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.054926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.055823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.056148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:50.143025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:50.143078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.157904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:50.158237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:50.158415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:50.165686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:50.166065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:50.166849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:50.167126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:50.170341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.170570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:50.171793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:50.171855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:50.172018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:50.172063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:50.172107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:50.172308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:50.291486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292274Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.292967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.293018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:50.293114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.834019Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-07T17:45:52.875448Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:45:52.875524Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875558Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.875592Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875621Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-07T17:45:52.875693Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:45:52.875718Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875738Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.875766Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875787Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-07T17:45:52.875832Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:52.875855Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875945Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.875972Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.875995Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-07T17:45:52.907023Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:45:52.907123Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907171Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.907207Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907241Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-07T17:45:52.907315Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:45:52.907344Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907369Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.907396Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-07T17:45:52.907508Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:52.907537Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907582Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.907609Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.907634Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-07T17:45:52.938694Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-07T17:45:52.938781Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-07T17:45:52.939256Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-07T17:45:52.939389Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-07T17:45:52.939477Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-07T17:45:52.939952Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2026-01-07T17:45:52.940009Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-07T17:45:52.940293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 16975298 2026-01-07T17:45:52.955988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:45:52.976857Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:45:52.976948Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.976994Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.977032Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.977079Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-07T17:45:52.977152Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-07T17:45:52.977180Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.977201Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.977225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.977249Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-07T17:45:52.977294Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:45:52.977317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.977338Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:45:52.977361Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:45:52.977383Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-07T17:45:52.998709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:52.999016Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 327us result status StatusSuccess 2026-01-07T17:45:52.999561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup 
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> Other::UnknownPathNotFound >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:50.988681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:50.988774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.988816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:50.988857Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:50.988904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:50.988977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:50.989028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:50.989078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:50.989770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:50.990007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:51.119985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:51.120071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:51.120943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:51.139327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:51.140236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:51.140416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:51.157535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:51.157892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:51.158629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:51.158981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:51.166052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:51.166231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:51.167442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:51.167510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:51.167701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:51.167767Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:51.167874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:51.168067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:51.400342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.401937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.402090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.402208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.402296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.402362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:51.402449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... rd__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:45:53.316659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:45:53.317776Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:53.317861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:45:53.317891Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:45:53.317921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:45:53.317951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:45:53.318027Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:45:53.320590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:45:53.321786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:45:53.322244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:45:53.322504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:45:53.322548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:45:53.322947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:45:53.323045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:45:53.323082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:747:2736] TestWaitNotification: OK 
eventTxId 102 2026-01-07T17:45:53.323507Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.323768Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 283us result status StatusSuccess 2026-01-07T17:45:53.324148Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-07T17:45:53.327455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.327901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-07T17:45:53.328008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2026-01-07T17:45:53.328169Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:45:53.330725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-07T17:45:53.330978Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:45:53.331316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:45:53.331365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:45:53.331767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:45:53.331865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:45:53.331903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:755:2744] TestWaitNotification: OK eventTxId 103 2026-01-07T17:45:53.332458Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.332689Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 253us result status StatusSuccess 2026-01-07T17:45:53.333431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> ActorHandler::NoValidGroupForbidden ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:52.805016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:52.805104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.805145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:52.805194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:52.805247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:52.805280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2026-01-07T17:45:52.805353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.805428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:52.806340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:52.806680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:52.930992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:52.931076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:52.931972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:52.943642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:52.944343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:52.944563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:52.954993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:52.955273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:52.956005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:52.956317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:52.960725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.960941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:52.962086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:52.962140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.962304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:52.962355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:52.962463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:45:52.962599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:53.127003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.127976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.128963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.129070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
026-01-07T17:45:53.770966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.771218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2026-01-07T17:45:53.773021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.773265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-07T17:45:53.776124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.776405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2026-01-07T17:45:53.776482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.776577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2026-01-07T17:45:53.778302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.778497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-07T17:45:53.781386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.782012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2026-01-07T17:45:53.782135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.782286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2026-01-07T17:45:53.784298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.784471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-07T17:45:53.787367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.787701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2026-01-07T17:45:53.787785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.787919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2026-01-07T17:45:53.789806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.790065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2026-01-07T17:45:53.793121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.793464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2026-01-07T17:45:53.793561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.793754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2026-01-07T17:45:53.795715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.795999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2026-01-07T17:45:53.799330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.799683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2026-01-07T17:45:53.799781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] 
TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2026-01-07T17:45:53.799933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-07T17:45:53.801795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.802017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:52.809040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:52.809158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.809241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:52.809316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:52.809377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:52.809414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:52.809593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.809726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:52.810826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:52.811219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:52.919852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:52.919926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:52.920619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:52.930333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:52.931149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:52.931337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:52.939411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:52.939643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:52.940207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:52.940483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:52.944090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.944236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:52.945178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:52.945244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:52.945436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:52.945479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:52.945577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:52.945729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:53.078840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.079774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.079870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.079986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.080814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:53.715590Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.715787Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 164us result status StatusSuccess 2026-01-07T17:45:53.716135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-07T17:45:53.716496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-07T17:45:53.716540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-07T17:45:53.716611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-07T17:45:53.716632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-07T17:45:53.716687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-07T17:45:53.716707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-07T17:45:53.717266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-07T17:45:53.717390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-07T17:45:53.717427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:747:2736] 2026-01-07T17:45:53.717691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-07T17:45:53.717803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-07T17:45:53.717832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:747:2736] 2026-01-07T17:45:53.717891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-07T17:45:53.717974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-07T17:45:53.718008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:747:2736] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-07T17:45:53.718574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:53.718769Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 228us result status StatusSuccess 2026-01-07T17:45:53.719088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-07T17:45:53.722564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:53.722948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-07T17:45:53.723059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2026-01-07T17:45:53.723212Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:45:53.725484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-07T17:45:53.725764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> ActorPage::NoValidGroupForbidden |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:52.993182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:52.993294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.993361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:52.993413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:52.993456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:52.993487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:52.993601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:52.993675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:52.994655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:52.994992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:53.133222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:53.133311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:53.134244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:53.146893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:53.147731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:53.147912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:53.159581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:53.159926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:53.160683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:53.161035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:53.165826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:53.166004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:53.167193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:53.167258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:53.167447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:53.167496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:53.167600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:53.167778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:53.338862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } 
Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.340945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.341106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.341191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
etID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:54.090156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-07T17:45:54.090292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:45:54.090473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:45:54.090571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:45:54.091625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:45:54.093335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:45:54.094740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:54.094794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:54.094971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:45:54.095066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:45:54.095188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:54.095251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-07T17:45:54.095310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:45:54.095345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:45:54.095597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.095645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:45:54.095755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:45:54.095796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:45:54.095844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:45:54.095879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:45:54.095939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:45:54.096034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:45:54.096088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:45:54.096127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:45:54.096211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:45:54.096253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:45:54.096289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:45:54.096370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:45:54.096406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-07T17:45:54.097718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:45:54.097814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:45:54.097866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:45:54.097922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:45:54.097971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:45:54.101364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:45:54.101506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:45:54.101560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:45:54.101594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:45:54.101627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:45:54.101718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:45:54.104675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:45:54.105915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:45:54.106219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:45:54.106279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:45:54.106836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:45:54.106953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:45:54.106999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:797:2786] TestWaitNotification: OK eventTxId 104 2026-01-07T17:45:54.107581Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:54.107877Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 284us result status StatusSuccess 2026-01-07T17:45:54.108319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:114:2144] 2026-01-07T17:45:53.132617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:53.132687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:53.132734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:53.132771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:53.132803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:53.132823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:53.132890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:53.132969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2026-01-07T17:45:53.133887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:53.134136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:53.237683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:45:53.237769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:53.238632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:53.250200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:53.251076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:53.251257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:53.261128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:53.261404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:53.262232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:53.262523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:53.267499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:53.269809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:53.271113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:53.271186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:53.271393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:53.271450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:53.271593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:53.271756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:53.435213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.436977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.437067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.437163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.437290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:53.437364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModif ... 
impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:45:54.082057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:45:54.082107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:45:54.082140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:45:54.082169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:45:54.083014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:54.083110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:54.083147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:45:54.083184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:45:54.083227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:45:54.083932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:54.084041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:45:54.084131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:45:54.084173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:45:54.084206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:45:54.084279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:45:54.087313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2026-01-07T17:45:54.087410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:45:54.087625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:45:54.087677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:45:54.088080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:45:54.088204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:45:54.088245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:709:2698] TestWaitNotification: OK eventTxId 101 2026-01-07T17:45:54.088692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:54.088988Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 253us result status StatusSuccess 2026-01-07T17:45:54.089474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: 
"https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:45:54.093141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:45:54.093471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-07T17:45:54.093546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-07T17:45:54.093590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-07T17:45:54.095912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:54.096167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:45:54.096499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:45:54.096549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:45:54.096914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:45:54.097081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:45:54.097119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:717:2706] TestWaitNotification: OK eventTxId 102 2026-01-07T17:45:54.097642Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:54.097840Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 208us result status StatusPathDoesNotExist 2026-01-07T17:45:54.098009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> ActorHandler::OptionsNoContent >> ActorHandler::HttpOk >> MonPage::HttpOk >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::Insert-QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> Other::TraceHttpOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:49.592626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:49.592713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.592758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:49.592797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:49.592832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:49.592969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:49.593024Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.593127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:49.594528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:49.594808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:49.694251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:49.694305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:49.715211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:49.715686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:49.716005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:49.724359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:49.724787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:49.725692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:49.725992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:49.729123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.729332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:49.730282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:49.730327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.730427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:49.730466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:49.730499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:49.730675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:49.862454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.863928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.864423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
thId: 37] was 0 2026-01-07T17:45:54.858116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2026-01-07T17:45:54.858233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.858309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:54.858339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:45:54.858454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:54.858623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:54.858830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.859950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.860957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 
72057594046678944 2026-01-07T17:45:54.861099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.861163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.861222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:54.866529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:54.873490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:54.873582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:54.873864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:54.873937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:54.873988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:54.874110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1162:3114] sender: [1:1215:2058] recipient: [1:15:2062] 2026-01-07T17:45:54.915886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:54.916174Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 313us result status StatusSuccess 2026-01-07T17:45:54.916655Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:45:54.919170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:45:54.919388Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 235us result status StatusSuccess 2026-01-07T17:45:54.919913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> THealthCheckTest::StaticGroupIssue >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> THealthCheckTest::TestNoSchemeShardResponse >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::DatabaseDoesNotExist >> THealthCheckTest::OneIssueListing >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceHttpOk [GOOD] Test command err: 2026-01-07T17:45:50.283820Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677332287943599:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:50.284322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:50.627514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:50.630075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:50.630169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:50.654733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:50.768423Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:50.836174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:50.836202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:50.836213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:50.836340Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:50.844046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:51.065697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:51.072936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:51.133917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:51.138882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:45:51.315366Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> THealthCheckTest::Basic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> THealthCheckTest::SpecificServerless >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: 
[1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:49.683536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:49.683633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.683683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:49.683728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:49.683764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:49.683794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:49.683857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:49.683947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:49.684833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:49.685149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:49.780639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:49.780702Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:49.803141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:49.803586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:49.803769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:49.810945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:49.811309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:49.812029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:49.812315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:49.815217Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.815419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:49.816557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:49.816618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:49.816720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:49.816767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:49.816823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:49.817021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:49.947146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.947961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.948751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T17:45:58.030187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-07T17:45:58.030508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.030639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.031972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.032086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.032470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.032718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.032812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: 
KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.032872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.033024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.033090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.033150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:58.033481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:58.038807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:58.038979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:58.043162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:1415:3358], Recipient [1:1415:3358]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:58.043227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:58.044581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:58.044659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:58.045783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1415:3358], Recipient [1:1415:3358]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:58.045830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:58.046170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:58.046230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:58.046284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:58.046340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:58.048180Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1451:3358], Recipient [1:1415:3358]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:58.048229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:58.048267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, 
at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1415:3358] sender: [1:1472:2058] recipient: [1:15:2062] 2026-01-07T17:45:58.087908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1471:3403], Recipient [1:1415:3358]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:58.087975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:58.088118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:58.088431Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 295us result status StatusSuccess 2026-01-07T17:45:58.089321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:09.927123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:10.006130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:10.013956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:10.014338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:10.014519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:10.385684Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:10.490543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:10.490691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:10.525915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:10.607627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:11.270306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:11.271097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:11.271131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:11.271151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:11.271467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:11.338304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:11.873975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:14.967834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:14.976729Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:14.980748Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:15.012715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:15.012868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:15.052627Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:15.054557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:15.239653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:15.239765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:15.241088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.241644Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.242202Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.242910Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.243361Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.243516Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.243669Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.243880Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.244007Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:15.258815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:15.437508Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:15.498719Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:15.498812Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:15.527460Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:15.528412Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:15.528579Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:15.528625Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:15.528671Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:15.528709Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:15.528761Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:15.528806Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:15.529446Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:15.540386Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:15.540467Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:15.563544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:42:15.564154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:42:15.617314Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:42:15.619028Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:15.634412Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:42:15.634469Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:42:15.634536Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:15.640277Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:15.643684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:15.649525Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:15.649676Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:15.660113Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:15.843426Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:15.855902Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:16.165533Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:16.309661Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:16.309766Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:42:17.058131Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... TICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:43:42.520419Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:43:42.520514Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2026-01-07T17:43:42.520609Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2026-01-07T17:43:42.520685Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:43:42.526850Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:43:42.527775Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:4369:3886] Owner: [2:4368:3885]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:43:42.527867Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:4369:3886] Owner: [2:4368:3885]. Column diff is empty, finishing 2026-01-07T17:43:42.551782Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4376:3891] 2026-01-07T17:43:42.552153Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4376:3891], schemeshard id = 72075186224037897 2026-01-07T17:43:42.552235Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4377:3892] 2026-01-07T17:43:42.552348Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4335:3866], server id = [2:4377:3892], tablet id = 72075186224037894, status = OK 2026-01-07T17:43:42.552468Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4377:3892], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:43:42.699754Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4382:3895] 2026-01-07T17:43:42.700538Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. 
ReplyToActorId [1:3166:3439] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-07T17:43:42.700605Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:40: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:3166:3439] 2026-01-07T17:43:42.700686Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-07T17:43:42.700905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:43:42.701709Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:43:42.706101Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:43:42.706336Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], Start read next stream part 2026-01-07T17:43:42.760694Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4401:3912]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:43:42.761495Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:43:42.761569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4401:3912], StatRequests.size() = 1 2026-01-07T17:44:56.047341Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrxavj016dmye4qjgedk0e", SessionId: ydb://session/3?node_id=2&id=MjU4MjE5Yy0yZDViZGRjNi1hMjM3NjM1LWZhOWMzNTMy, Slow query, duration: 73.336463s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:44:56.049178Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:44:56.049459Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], Start read next stream part 2026-01-07T17:44:56.049840Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-07T17:44:56.049981Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:44:56.053691Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:44:56.053766Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4385:3897], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODRjOWZmMmMtOGNmMzk5NTItOTFlNWZhZDYtZjM0MjQ3ZjI=, TxId: 2026-01-07T17:44:56.054444Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:44:56.054560Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], Start read next stream part 2026-01-07T17:44:56.151669Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4526:4024]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:44:56.152034Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:44:56.152084Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4526:4024], StatRequests.size() = 1 2026-01-07T17:45:55.458884Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrzjfp7ahcn13j0w1bp8ds", SessionId: ydb://session/3?node_id=2&id=Mjg0NTlmMjAtOGMyM2UzZDYtY2VlOThhMTktZDFjYTUxNWM=, Slow query, duration: 59.400423s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:45:55.461292Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:45:55.461448Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], Start read next stream part 2026-01-07T17:45:55.461864Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32250, txId: 18446744073709551615] shutting down 2026-01-07T17:45:55.462459Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:45:55.462513Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4383:3870], ActorId: [2:4507:4009], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=MzFiNWQ3MTctZDY2YTcwZjgtNjNmMWZmZTItYjQxMmJkNTc=, TxId: 2026-01-07T17:45:55.463441Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4620:4111], ActorId: [2:4622:4113], Starting query actor #1 [2:4623:4114] 2026-01-07T17:45:55.463509Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4622:4113], ActorId: [2:4623:4114], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:45:55.467669Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4622:4113], ActorId: [2:4623:4114], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2FlZDBkZDktYTg1MmJhMDktZGEzMDQxZjMtNWRmMmQ0YWM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:45:55.547234Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4635:4125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:45:55.547506Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:45:55.547549Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4635:4125], StatRequests.size() = 1 2026-01-07T17:45:55.734741Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4622:4113], ActorId: [2:4623:4114], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2FlZDBkZDktYTg1MmJhMDktZGEzMDQxZjMtNWRmMmQ0YWM=, TxId: 2026-01-07T17:45:55.734843Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4622:4113], ActorId: [2:4623:4114], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2FlZDBkZDktYTg1MmJhMDktZGEzMDQxZjMtNWRmMmQ0YWM=, TxId: 2026-01-07T17:45:55.735207Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4620:4111], ActorId: [2:4622:4113], Got response [2:4623:4114] SUCCESS 2026-01-07T17:45:55.735559Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:45:55.766253Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:45:55.766350Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3166:3439] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:45:48.722863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:45:48.722934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:48.722969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:45:48.723001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:45:48.723039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:45:48.723060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:45:48.723099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:45:48.723159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:45:48.725291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:45:48.725601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:45:48.811075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:48.811172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:48.828124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:45:48.828554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:45:48.828750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:45:48.835938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:45:48.836350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:45:48.837090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:45:48.837400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:45:48.840378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:48.840634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:48.841937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:48.841996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:48.842107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:48.842173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:48.842227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:48.842408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:45:48.999037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.000977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:45:49.001088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2026-01-07T17:45:59.166621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.166750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.167982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.168230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.168334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.168825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.169644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:45:59.170020Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-07T17:45:59.177039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:59.177218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:45:59.181483Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:1539:3470], 
Recipient [1:1539:3470]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:59.181552Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-07T17:45:59.183906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:45:59.184000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:45:59.184871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1539:3470], Recipient [1:1539:3470]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:59.184953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:59.185399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:45:59.185476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:45:59.185537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:45:59.185583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:45:59.195188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1575:3470], Recipient [1:1539:3470]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:59.195258Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:45:59.195301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1539:3470] sender: [1:1596:2058] recipient: [1:15:2062] 2026-01-07T17:45:59.248344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1595:3515], Recipient [1:1539:3470]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-07T17:45:59.248424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-07T17:45:59.248550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:45:59.248901Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 341us result status StatusSuccess 2026-01-07T17:45:59.249760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 25075 Memory: 141504 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> Other::UnknownPathNotFound [GOOD] >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues >> ActorHandler::NoValidGroupForbidden [GOOD] >> ActorHandler::NoUseAuthOk >> ActorPage::NoValidGroupForbidden [GOOD] >> ActorPage::OptionsNoContent >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> MonPage::HttpOk [GOOD] >> MonPage::OptionsNoContent >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> ActorHandler::HttpOk [GOOD] >> ActorHandler::InvalidTokenForbidden >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> ActorHandler::OptionsNoContent [GOOD] >> ActorPage::HttpOk >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::UnknownPathNotFound [GOOD] Test command err: 2026-01-07T17:45:54.390779Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677351371872231:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:54.390858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:54.661299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:54.667090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:54.667241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:45:54.674620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:54.754014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:54.755620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677351371872203:2081] 1767807954387011 != 1767807954387014 2026-01-07T17:45:54.825321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:54.843631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:54.843665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:54.843674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:54.843778Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:55.063423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:55.069434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:55.118000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:55.134619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> THealthCheckTest::Issues100Groups100VCardListing >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] >> KqpQueryPerf::MultiRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:43:35.110644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:35.182444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:35.194431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:35.194787Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:35.194827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:35.387263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:35.387365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:35.431913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807813035727 != 1767807813035731 2026-01-07T17:43:35.440494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:35.481378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:35.563285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:35.825215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:35.825304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:35.825344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:35.825442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-07T17:43:35.825466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:43:35.918262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:43:35.918525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:35.918776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:43:35.918834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:43:35.919042Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:43:35.919136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:35.919233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:35.919904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:43:35.920083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:43:35.920162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:35.920198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:35.920411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.920470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.920552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:35.920608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:43:35.920650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:43:35.920688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:43:35.920790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:35.921300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:35.921344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:35.921478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.921515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.921562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:35.921621Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:43:35.921672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:43:35.921743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:35.922067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:35.922105Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:35.922230Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.922262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:35.922308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:35.922341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:35.922396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:43:35.922436Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:35.922472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:43:35.926342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:43:35.926889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:35.926940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:43:35.927085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:43:35.927345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-07T17:43:35.927799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:35.927856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:35.927893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:35.928005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-07T17:43:35.928036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:43:35.928100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... ck for operation: 281474976715672 2026-01-07T17:45:53.568053Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715672 2026-01-07T17:45:53.667227Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:53.667325Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:53.667438Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:53.667480Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:53.859689Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:53.859788Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:53.859922Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:53.859979Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:53.968574Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 49, but no stats yet 2026-01-07T17:45:53.968873Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037909, for tableId 46, but no stats yet 2026-01-07T17:45:53.969082Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037910, for tableId 43, but no stats yet 2026-01-07T17:45:53.969291Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037911, for tableId 40, but no stats yet 2026-01-07T17:45:54.050886Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.050961Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.051048Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.051077Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.125712Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037916, for tableId 54, but no stats yet 2026-01-07T17:45:54.125990Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037917, for tableId 51, but no stats yet 2026-01-07T17:45:54.126187Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037918, for tableId 53, but no stats yet 2026-01-07T17:45:54.126366Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037919, for tableId 56, but no stats yet 2026-01-07T17:45:54.144467Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-07T17:45:54.144576Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:45:54.144654Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:54.240240Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.240321Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.240414Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.240445Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.266212Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-07T17:45:54.266514Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-07T17:45:54.357404Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037890 state 5 2026-01-07T17:45:54.357563Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037891 state 5 2026-01-07T17:45:54.461195Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.461290Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.461420Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.461460Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.637006Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.637102Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.637230Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.637271Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.661946Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.672867Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.673066Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.673155Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037902 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.811795Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.811883Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:54.812010Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.812045Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:54.925375Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.936195Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037909 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.936334Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037910 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:54.936409Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 
2026-01-07T17:45:55.010839Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:55.010911Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:55.010995Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:55.011028Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:55.174113Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:55.174184Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:45:55.174281Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:413:2409], Recipient [10:413:2409]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:45:55.174313Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } } { items { uint32_value: 1 } items { uint32_value: 1000 } }, { items { uint32_value: 2 } items { uint32_value: 2000 } }, { items { uint32_value: 3 } items { uint32_value: 3000 } } { items { uint32_value: 3 } } { items { uint32_value: 3 } } { items { uint64_value: 3 } } { items { uint64_value: 3 } } { items { uint32_value: 300 } items { uint32_value: 3 } } { items { uint32_value: 3000 } items { uint32_value: 3 } } >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink |92.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> THealthCheckTest::SpecificServerless [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::Upsert+QueryService-UseSink >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25838, MsgBus: 25282 2026-01-07T17:45:55.626424Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677354413484999:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:55.626495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.665708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:55.932863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:55.933012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:55.958705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:55.991915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:56.049441Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:56.050471Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677354413484974:2081] 1767807955624902 != 1767807955624905 2026-01-07T17:45:56.145560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-07T17:45:56.145584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:56.145703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:56.145789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:56.285033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.651649Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:56.731330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:56.741748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:56.805572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:56.969958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.135563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.211222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:58.957751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677367298388740:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:58.957850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:58.958170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677367298388750:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:58.958234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.282696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.324265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.364286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.398997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.433291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.474712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.518054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.573610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.658194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677371593356918:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.658300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.658522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677371593356923:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.658573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677371593356924:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.658695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.662655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:45:59.672377Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677371593356927:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:45:59.737792Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677371593356978:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:00.626620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677354413484999:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:00.626689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THealthCheckTest::StaticGroupIssue [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> THealthCheckTest::StorageLimit95 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> THealthCheckTest::Issues100GroupsListing [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> THealthCheckTest::OneIssueListing [GOOD] >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> THealthCheckTest::Issues100VCardListing |92.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test |92.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> ActorHandler::NoUseAuthOk [GOOD] >> ActorPage::OptionsNoContent [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> ActorPage::HttpOk [GOOD] >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> ActorHandler::InvalidTokenForbidden [GOOD] >> MonPage::OptionsNoContent [GOOD] >> THealthCheckTest::TestTabletIsDead >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> KqpQueryPerf::IdxLookupJoin+QueryService >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> THealthCheckTest::BridgeTwoGroups >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> THealthCheckTest::Issues100Groups100VCardMerging >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::HttpOk [GOOD] Test command err: 2026-01-07T17:45:55.605920Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677353946425371:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:55.605968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.941693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:56.052543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:56.052674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:56.055560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:56.118540Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677353946425331:2081] 1767807955590830 != 1767807955590833 2026-01-07T17:45:56.120972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:56.147490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.249950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:56.249979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:56.249985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:56.250086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:56.620109Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:56.628124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:56.708372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:56.719319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:45:56.724807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:46:01.977905Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677381512418857:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.977996Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:02.033815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:02.187868Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:02.193132Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677381512418810:2081] 1767807961973924 != 1767807961973927 2026-01-07T17:46:02.206962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:02.207066Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:02.240333Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:02.314575Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:02.408535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:02.408561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:02.408568Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:02.408662Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:02.743666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:02.751195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:02.848856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 
2026-01-07T17:46:02.856913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> MonPage::OptionsNoContent [GOOD] Test command err: 2026-01-07T17:45:55.555339Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677355668099946:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:55.555457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.822443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:55.828108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:55.828268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:55.890050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:55.904691Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:55.917155Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:45:55.917816Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:45:55.979628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:55.979642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:55.979648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:55.979721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:56.123020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.245125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:56.257586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:56.346467Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:56.353213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:01.490320Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677382186980956:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.490715Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:01.514005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:01.689390Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677382186980744:2081] 1767807961473669 != 1767807961473672 2026-01-07T17:46:01.689485Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:01.694041Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:01.702191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:01.702327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:01.705958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:01.857039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:01.873087Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:01.873140Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:01.873152Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:01.873256Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:02.235622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:02.246069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:02.351048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:46:02.360233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:02.384341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:46:02.489088Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:35.505115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:35.576789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:35.591672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:35.592063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:35.592100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:35.798631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:35.798768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:35.872619Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807813277694 != 1767807813277698 2026-01-07T17:43:35.884424Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:35.931269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:36.018755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:36.314084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.314178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.314231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.314379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-07T17:43:36.314420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:43:36.437384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:43:36.437660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.437914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:43:36.437983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:43:36.438204Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:43:36.438283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:36.438382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.439067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:43:36.439257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:43:36.439320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.439354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.439561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.439606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.439693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.439746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:43:36.439786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:43:36.439820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:43:36.439931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.440493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.440536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.440658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.440694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.440748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.440791Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:43:36.440826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:43:36.440899Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.441313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.441365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.441486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.441520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.441568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.441607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.441659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:43:36.441696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.441733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:43:36.445412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:43:36.445945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.446001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:43:36.446139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:43:36.446425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-07T17:43:36.446856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.446913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.446953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.447077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-07T17:43:36.447113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:43:36.447183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... 0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.225282Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.225413Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.225452Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.246793Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressIncrementalRestore 2026-01-07T17:46:03.246873Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5416: StateWork, processing event TEvPrivate::TEvProgressIncrementalRestore 2026-01-07T17:46:03.246913Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:444: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 281474976715670 tablet: 72057594046644480 2026-01-07T17:46:03.247003Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 281474976715670 tablet: 72057594046644480 2026-01-07T17:46:03.247046Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:55: [IncrementalRestore] Incremental restore already in state 3, skipping progress check for operation: 281474976715670 2026-01-07T17:46:03.247112Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715670 2026-01-07T17:46:03.361716Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037896, for tableId 18, but no stats yet 2026-01-07T17:46:03.362088Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037897, for tableId 20, but no stats yet 2026-01-07T17:46:03.362303Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037895, for tableId 18, but no stats yet 2026-01-07T17:46:03.455629Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.455692Z node 11 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.455764Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.455790Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.626563Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.626654Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.626804Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.626842Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.659674Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037898, for tableId 31, but no stats yet 2026-01-07T17:46:03.659935Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037903, for tableId 28, but no stats yet 2026-01-07T17:46:03.831795Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.831879Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:03.831967Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.832000Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:03.843508Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 35, but no stats yet 2026-01-07T17:46:03.843749Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037905, for tableId 33, but no stats yet 2026-01-07T17:46:03.843930Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037906, for tableId 33, but no stats yet 2026-01-07T17:46:03.932902Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-07T17:46:03.932994Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:46:03.933083Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:04.079156Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received 
event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.079236Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.079328Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.079367Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.104214Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-07T17:46:04.104432Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-07T17:46:04.284772Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.284841Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.284912Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.284960Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.410140Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:04.410347Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:04.410429Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:04.493130Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.493218Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.493314Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.493348Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.642338Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.642421Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.642511Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.642537Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.702282Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:04.713057Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:04.879882Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.879975Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:46:04.880084Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:401:2400], Recipient [11:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:46:04.880119Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 2 } items { text_value: "Bob" } items { uint32_value: 31 } }, { items { uint32_value: 12 } items { text_value: "David" } items { uint32_value: 41 } }, { items { uint32_value: 13 } items { text_value: "Frank" } items { uint32_value: 45 } } { items { uint32_value: 28 } items { uint32_value: 3 } }, { items { uint32_value: 31 } items { uint32_value: 2 } }, { items { uint32_value: 41 } items { uint32_value: 12 } }, { items { uint32_value: 45 } items { uint32_value: 13 } } { items { uint64_value: 4 } } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31062, MsgBus: 65485 2026-01-07T17:45:56.171487Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677358569765372:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:56.172038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:56.441130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:56.444711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:56.444826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:56.480201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-07T17:45:56.556075Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677358569765344:2081] 1767807956168137 != 1767807956168140 2026-01-07T17:45:56.563109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:56.651438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.691782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:56.691807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:56.691814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:56.691893Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:57.160188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:57.166736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:57.184487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:57.212884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.371684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.553615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.635312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.646840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677371454669111:2399], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.646960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.650294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677371454669121:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.650390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.949232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.980856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.011982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.048037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.085209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.121107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.171911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.225764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.300821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677375749637289:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.301067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677375749637294:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.301114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.301470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677375749637297:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.301519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.305625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:00.318682Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677375749637296:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:00.394828Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677375749637349:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:01.173619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677358569765372:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.178278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19259, MsgBus: 17689 2026-01-07T17:46:03.683916Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677387138667527:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:03.683959Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:03.765337Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:03.847520Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.847599Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.859156Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.862217Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:03.920318Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:03.920340Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:03.920348Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:03.920437Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:04.034594Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:04.401521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:04.426442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.495661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.650758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.694626Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:04.725055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.977938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677400023571234:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.978022Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.978306Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677400023571244:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.978346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.043186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.119545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.150557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.184827Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.214578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.281668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.325763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.394581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.500381Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677404318539411:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.500468Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.500644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677404318539416:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.500831Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677404318539417:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.500872Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.504692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:07.529040Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677404318539420:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:07.584190Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677404318539471:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:08.683994Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677387138667527:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:08.684069Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::NoUseAuthOk [GOOD] Test command err: 2026-01-07T17:45:54.921164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677350106577538:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:54.921286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.111097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:55.176788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:55.176974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:55.221920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:55.279209Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:55.310135Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677350106577511:2081] 1767807954919793 != 1767807954919796 2026-01-07T17:45:55.322001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:55.409300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:55.409325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:55.409332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:55.409433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:55.663318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:55.677320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:55.738962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:55.752145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:01.123853Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677380286749312:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.123939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:01.156777Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:01.288907Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677380286749285:2081] 1767807961122399 != 1767807961122402 2026-01-07T17:46:01.300512Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:01.306414Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:01.306501Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:01.321431Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:01.377953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:01.407226Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:01.407258Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:01.407265Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:01.407376Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:01.680229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:01.686823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:01.729638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:46:01.745676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:01.751398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command err: 2026-01-07T17:45:54.958227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677349882795002:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:54.958382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.243866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:55.260639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:55.260836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:55.325391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:55.331788Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:55.475546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:55.475571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:55.475580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:55.475790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:55.495895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:55.725916Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:55.734765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:55.796760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:55.814778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:01.134581Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677381305501439:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.134649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:01.208987Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:01.306508Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:01.358223Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:01.358327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:01.367756Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:01.428433Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:01.501638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:01.501666Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:01.501674Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:01.501764Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:01.799376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:01.807627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:01.850968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:46:01.856168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] Test command err: 2026-01-07T17:45:55.512877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677356246396751:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:55.513684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:55.865067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:55.883607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:55.883767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:55.947269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:55.965185Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677356246396647:2081] 1767807955507784 != 1767807955507787 2026-01-07T17:45:55.966195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:56.169072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.268453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:56.268478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:56.268493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:56.268586Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:56.526772Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:56.555126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:56.561936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:45:56.621726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:45:56.627577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:45:56.647861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T17:46:01.732728Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677380037526309:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.732813Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:01.749278Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:01.890698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:01.890780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:01.897028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:01.919160Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:01.933202Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:02.062514Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:02.062540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:02.062555Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:02.062673Z node 2 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:02.179542Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:02.622361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:02.630050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:02.719522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:46:02.725244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:46:02.741057Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:02.747937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17182, MsgBus: 28144 2026-01-07T17:45:56.137223Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677356745434136:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:56.137798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:56.461290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:56.461403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:56.526893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:56.532654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:56.577894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:56.789516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:56.827026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:56.827052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:56.827061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:56.827157Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:57.155260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:45:57.370972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:45:57.376561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:45:57.440365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.593733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.793325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:57.874168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:45:59.734348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677369630337850:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.734518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.734927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677369630337860:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:45:59.734991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.076003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.108675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.143601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.182467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.221606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.277524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.313973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.360855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:00.432466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677373925306034:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.432530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.432838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677373925306039:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.432862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677373925306040:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.432903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:00.436468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:00.445892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677373925306043:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:00.535506Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677373925306094:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:01.137826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677356745434136:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.137884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3280, MsgBus: 30853 2026-01-07T17:46:03.388445Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677389681190075:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:03.392118Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:03.408938Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:03.485345Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:03.517062Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.517142Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.522169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.601022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:03.637583Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:03.637611Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:03.637620Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:03.637699Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:04.226370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:04.240205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.324611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.384299Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:04.493431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.560441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.034395Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677406861060998:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.034487Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.035066Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677406861061008:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.035106Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.104365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.140579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.177080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.228298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.278183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.338339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.435356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.506853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.592577Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677406861061886:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.592666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.592881Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677406861061891:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.592945Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677406861061892:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.592976Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.598174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:07.614925Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677406861061895:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:07.705119Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677406861061946:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:08.383342Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677389681190075:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:08.383411Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 |92.4%| [TA] $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21278, MsgBus: 19926 2026-01-07T17:46:05.653200Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677396084928605:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:05.653279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:05.979812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:05.979923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:06.051414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:06.091760Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:06.096792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:06.212658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:06.212690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:06.212702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:06.212804Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:06.266980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:06.661817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:06.709077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:06.789395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.949074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.124760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.199518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.121813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677413264799594:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.121929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.122280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677413264799604:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.122318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.397860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.426131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.456713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.484870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.511131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.553478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.583775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.628103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.701951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677413264800476:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.702036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.702341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677413264800482:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.702356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677413264800481:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.702392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.705626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:09.715528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677413264800485:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:09.793866Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677413264800536:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:10.653393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677396084928605:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:10.653468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1835, MsgBus: 25825 2026-01-07T17:46:05.005802Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677393687818611:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:05.005885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:05.046663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:05.430783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:05.484519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:05.484642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:05.487519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:05.576999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:05.685660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:05.685688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:05.685698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:05.685775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:05.690745Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:06.000662Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:06.148348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:06.154657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:06.202920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.366748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.548271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.637613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.408517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677410867689440:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:08.408652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:08.409154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677410867689450:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:08.409205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:08.724716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.756565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.789568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.823887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.864858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.898918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:08.932475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.003033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.079044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677415162657614:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.079095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.079129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677415162657619:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.079242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677415162657621:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.079274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:09.084249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:09.094899Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677415162657623:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:09.170344Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677415162657674:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:10.005139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677393687818611:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:10.005224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> KqpQueryPerf::Update-QueryService+UseSink >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpQueryPerf::Replace+QueryService-UseSink >> KqpQueryPerf::Upsert-QueryService+UseSink >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::Replace-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::BridgeGroupDegradedInBothPiles >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23496, MsgBus: 29339 2026-01-07T17:46:08.037137Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677409603509881:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:08.037222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:08.081755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:08.277282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:08.277441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:46:08.279525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:08.347091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:08.367552Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:08.427381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:08.427406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:08.427432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:08.427560Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:08.614700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:08.833710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:08.886612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.018364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.109156Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:09.154236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:09.217160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.139189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677422488413592:2399], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.139319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.139787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677422488413602:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.139846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.447553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.474067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.502116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.530233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.554491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.588355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.620334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.662369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.719666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677422488414470:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.719751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.719989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677422488414475:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.720029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677422488414476:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.720038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.723628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:11.733052Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677422488414479:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:11.835940Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677422488414530:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:13.037446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677409603509881:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:13.037527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService >> THealthCheckTest::TestNoSchemeShardResponse [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2026-01-07T17:45:46.174807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.205716Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.206551Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2152] 2026-01-07T17:45:46.206958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.246128Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.694868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.694931Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.717729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.718074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.719975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.720061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.720125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.720698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.721228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.721346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:195:2152] in generation 2 2026-01-07T17:45:46.921231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.951969Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.952258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.952394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-07T17:45:46.952453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.952502Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.952561Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.952943Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.953013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.953259Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.953396Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.953538Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.953597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.953647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-07T17:45:46.953710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.953765Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.953827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.953867Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.953918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.953961Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.954013Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.962121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:128:2152]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.962209Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.962315Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.962497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.962582Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.962645Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.962698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.962739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.962779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.962813Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.963163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.963205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.963239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.963274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.963336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.963366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.963428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.963467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.963488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.977609Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.977679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 
2026-01-07T17:45:46.977714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.977763Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.977848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.978274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.978315Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.978349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-07T17:45:46.978462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:128:2152]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.978484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.978604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.978651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:45:46.978698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.978725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.981553Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.981615Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.981807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.981838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.981885Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.981922Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.981951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.982012Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46.982043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
sion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:46:16.014188Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2026-01-07T17:46:16.014247Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:46:16.014305Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:46:16.014330Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:46:16.014356Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit FinishPropose 2026-01-07T17:46:16.014383Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2026-01-07T17:46:16.014417Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is DelayComplete 2026-01-07T17:46:16.014445Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2026-01-07T17:46:16.014470Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit CompletedOperations 2026-01-07T17:46:16.014496Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2026-01-07T17:46:16.014538Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is Executed 2026-01-07T17:46:16.014562Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2026-01-07T17:46:16.014586Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1002] at 9437184 has finished 2026-01-07T17:46:16.031540Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:46:16.031615Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2026-01-07T17:46:16.031655Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-07T17:46:16.031742Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2026-01-07T17:46:16.037269Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-07T17:46:16.037335Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-07T17:46:16.038283Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4553:6470], Recipient 
[3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:16.038334Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:16.038372Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4552:6469], serverId# [3:4553:6470], sessionId# [0:0:0] 2026-01-07T17:46:16.039293Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2026-01-07T17:46:16.039337Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:46:16.039426Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:16.040032Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2026-01-07T17:46:16.040099Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-07T17:46:16.040152Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2026-01-07T17:46:16.040198Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:46:16.040225Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:46:16.040262Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:46:16.040318Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1003] at 9437184 2026-01-07T17:46:16.040351Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-07T17:46:16.040373Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:46:16.040394Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:46:16.040419Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BlockFailPoint 2026-01-07T17:46:16.040443Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-07T17:46:16.040464Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:46:16.040483Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:46:16.040502Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:16.040541Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:46:16.040887Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2026-01-07T17:46:16.040963Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:46:16.041055Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:46:16.041086Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:46:16.041112Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit FinishPropose 2026-01-07T17:46:16.041140Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2026-01-07T17:46:16.041177Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is DelayComplete 2026-01-07T17:46:16.041203Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2026-01-07T17:46:16.041226Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit CompletedOperations 2026-01-07T17:46:16.041251Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2026-01-07T17:46:16.041293Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-07T17:46:16.041315Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2026-01-07T17:46:16.041338Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1003] at 9437184 has finished 2026-01-07T17:46:16.087426Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:46:16.087489Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-07T17:46:16.089782Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:46:16.089849Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2026-01-07T17:46:16.089887Z node 3 :TX_DATASHARD 
TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2026-01-07T17:46:16.089967Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:46:16.093968Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:238:2231], Recipient [3:240:2232]: NKikimr::TEvTablet::TEvFollowerGcApplied .2026-01-07T17:46:16.097655Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4567:6483], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:16.097740Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:16.097795Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4566:6482], serverId# [3:4567:6483], sessionId# [0:0:0] 2026-01-07T17:46:16.098413Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553160, Sender [3:4565:6481], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 KeyAccessSample { } } |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11028, MsgBus: 4562 2026-01-07T17:46:03.264021Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677387859909305:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:03.264277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:03.541043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:03.568036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.568157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.607575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.655628Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677387859909206:2081] 1767807963256109 != 1767807963256112 2026-01-07T17:46:03.658885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:03.729618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:03.729641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:03.729647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:03.729718Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:03.846805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:04.236419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:04.245237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:04.280251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:04.314597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.498155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.680727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:04.749500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.784880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677400744812960:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.785066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.785479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677400744812970:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.785530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.107664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.146831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.186071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.225124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.261713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.322441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.377628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.439059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:07.536463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677405039781142:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.536563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.536980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677405039781147:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.537040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677405039781148:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.537125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:07.541229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:07.553693Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677405039781151:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:07.622647Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677405039781202:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:08.263248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677387859909305:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:08.263347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18727, MsgBus: 17474 2026-01-07T17:46:10.314176Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677417619351216:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:10.314244Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:10.331386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:10.401049Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:10.401128Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:10.417228Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677417619351191:2081] 1767807970313737 != 1767807970313740 2026-01-07T17:46:10.428895Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:10.438690Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:10.530842Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:10.530869Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:10.530876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:10.530947Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:10.609424Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:10.924414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:46:10.934652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:10.983889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.129899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.211931Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.330862Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:13.564312Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677430504254948:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.564397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.564697Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677430504254957:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.564763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.637548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.668326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.709333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.743681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.783213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.828455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.869330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.925149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.024256Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677434799223123:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.024342Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.024362Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677434799223128:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.024640Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677434799223130:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.024695Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.027830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:14.037041Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677434799223131:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:14.098509Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677434799223183:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:15.317052Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677417619351216:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:15.317128Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> AnalyzeColumnshard::AnalyzeShard >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |92.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 
2026-01-07T17:43:51.508717Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1767807831508683 2026-01-07T17:43:51.955078Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592676820546662751:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:51.955139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:43:52.009457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:52.010377Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:43:52.040952Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:43:52.261151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:52.261326Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:52.304407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:43:52.353849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:52.353951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:52.357302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:52.357368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:52.404730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:52.411138Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:43:52.411348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:52.470043Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:52.478087Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:52.624882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:43:52.651702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038b7/r3tmp/yandexwF2Kc1.tmp 2026-01-07T17:43:52.651730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038b7/r3tmp/yandexwF2Kc1.tmp 2026-01-07T17:43:52.651870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038b7/r3tmp/yandexwF2Kc1.tmp 2026-01-07T17:43:52.651951Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:52.732562Z INFO: TTestServer started on Port 65134 GrpcPort 24327 PQClient connected to localhost:24327 2026-01-07T17:43:53.005141Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:53.027677Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:43:53.037854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:56.382557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676842021500434:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:56.382761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:56.384767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592676842021500453:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:56.388850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:56.444692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592676842021500455:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-07T17:43:56.964423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592676820546662751:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:43:56.977330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:43:57.370717Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592676842021500542:2973] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:57.826746Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592676846316467863:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:43:57.830946Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592676841491567699:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:43:57.832420Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YTc1MzY2NzMtNGRhN2M0YjMtOGMyMmFmY2ItMjQ3Nzk5MDU=, ActorId: [2:7592676841491567644:2295], ActorState: ExecuteState, LegacyTraceId: 01kecrxr8f0hjgxjvkj88whmdf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:43:57.847481Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzUwZjg5MzYtOTUwN2QyMGYtZjExYzI5ZTUtZmJkNTVlMzE=, ActorId: [1:7592676842021500422:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrxr34bt08fwazc7sdk1nd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:43:57.871148Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:43:57.871645Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:43:58.198282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:58.771537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... 16.581275Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:46:16.581294Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:46:16.581310Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:16.581361Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-07T17:46:16.581416Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:46:16.582477Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-07T17:46:16.582507Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-07T17:46:16.582610Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:46:16.583028Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|88c3157f-ac0f748a-4daf3862-13fe9602_0 2026-01-07T17:46:16.585080Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767807976585 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:46:16.585210Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|88c3157f-ac0f748a-4daf3862-13fe9602_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-07T17:46:16.589021Z :INFO: [] MessageGroupId [src] SessionId [src|88c3157f-ac0f748a-4daf3862-13fe9602_0] Write session: close. 
Timeout = 0 ms 2026-01-07T17:46:16.589091Z :INFO: [] MessageGroupId [src] SessionId [src|88c3157f-ac0f748a-4daf3862-13fe9602_0] Write session will now close 2026-01-07T17:46:16.589145Z :DEBUG: [] MessageGroupId [src] SessionId [src|88c3157f-ac0f748a-4daf3862-13fe9602_0] Write session: aborting 2026-01-07T17:46:16.589730Z :INFO: [] MessageGroupId [src] SessionId [src|88c3157f-ac0f748a-4daf3862-13fe9602_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:46:16.589778Z :DEBUG: [] MessageGroupId [src] SessionId [src|88c3157f-ac0f748a-4daf3862-13fe9602_0] Write session: destroy 2026-01-07T17:46:16.594481Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|88c3157f-ac0f748a-4daf3862-13fe9602_0 grpc read done: success: 0 data: 2026-01-07T17:46:16.594515Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|88c3157f-ac0f748a-4daf3862-13fe9602_0 grpc read failed 2026-01-07T17:46:16.594550Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|88c3157f-ac0f748a-4daf3862-13fe9602_0 grpc closed 2026-01-07T17:46:16.594570Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|88c3157f-ac0f748a-4daf3862-13fe9602_0 is DEAD 2026-01-07T17:46:16.595455Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:46:16.595945Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [15:7592677445965790971:2456] destroyed 2026-01-07T17:46:16.596000Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:46:16.596032Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:16.596049Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.596063Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:16.596081Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.596095Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:16.638450Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:16.638491Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.638513Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:16.638538Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.638557Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:16.643381Z :INFO: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Starting read session 2026-01-07T17:46:16.643439Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Starting cluster discovery 2026-01-07T17:46:16.643672Z :INFO: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26570
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26570. " 2026-01-07T17:46:16.643732Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Restart cluster discovery in 0.005778s 2026-01-07T17:46:16.651875Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Starting cluster discovery 2026-01-07T17:46:16.652259Z :INFO: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26570
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26570. " 2026-01-07T17:46:16.652306Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Restart cluster discovery in 0.016786s 2026-01-07T17:46:16.673091Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Starting cluster discovery 2026-01-07T17:46:16.673312Z :INFO: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26570
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26570. " 2026-01-07T17:46:16.673349Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Restart cluster discovery in 0.021799s 2026-01-07T17:46:16.697112Z :DEBUG: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Starting cluster discovery 2026-01-07T17:46:16.697497Z :NOTICE: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26570
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26570. " } 2026-01-07T17:46:16.697720Z :NOTICE: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26570
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26570. " } 2026-01-07T17:46:16.697842Z :INFO: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Closing read session. Close timeout: 0.000000s 2026-01-07T17:46:16.697941Z :NOTICE: [/Root] [/Root] [de342df0-e4dbb178-488712db-9872a15e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:46:16.738554Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:16.738591Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.738611Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:16.738634Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.738655Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:16.838849Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:16.838892Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.838922Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:16.838944Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:16.838964Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:17.046243Z node 15 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [15:7592677450260758304:2468] TxId: 281474976710674. Ctx: { TraceId: 01kecs21407eyqej6h4qtah2nn, Database: /Root, SessionId: ydb://session/3?node_id=15&id=YjJjODYzMWUtNGY2ZWYwNDEtMWQwOWI1NjUtOTdkNTU5ZGU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 trace_id# 2026-01-07T17:46:17.046411Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [15:7592677450260758308:2468], TxId: 281474976710674, task: 3. Ctx: { CheckpointId : . TraceId : 01kecs21407eyqej6h4qtah2nn. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=15&id=YjJjODYzMWUtNGY2ZWYwNDEtMWQwOWI1NjUtOTdkNTU5ZGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [15:7592677450260758304:2468], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |92.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15343, MsgBus: 3316 2026-01-07T17:46:02.479862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677383443560952:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:02.509197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:02.781034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:02.789194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:02.789318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:02.856809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:02.872377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:02.961567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:02.961607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:02.961619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:02.961715Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:03.058739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:03.402561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:03.413765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:03.459149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:03.478466Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:03.660533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:03.856120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:03.932809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:05.674575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677396328464629:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:05.674694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:05.675071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677396328464639:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:05.675121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:05.968752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.007216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.034406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.066456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.102963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.142577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.178432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.237689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:06.345355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677400623432801:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.345457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.345597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677400623432806:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.345657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677400623432808:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.345752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:06.352702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:06.371845Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677400623432810:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:06.454343Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677400623432861:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:07.477032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677383443560952:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:07.477090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:46:08.042106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:11.072747Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:11.113029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:11.113107Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:11.118212Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:11.162549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:11.162573Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:11.162581Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:11.162654Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:11.297640Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:11.508353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:11.517147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:46:11.559821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.673809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.749910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:12.010157Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:14.473080Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677437813306715:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.473182Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.477073Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677437813306725:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.477186Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.554448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.594987Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.640611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.684983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.731636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.791152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.840575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.896478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.001132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677437813307596:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:15.001236Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:15.001532Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677442108274897:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:15.001534Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677442108274898:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:15.001588Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:15.007008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:15.030933Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677442108274901:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:15.100317Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677442108274954:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:16.003680Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677424928402996:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:16.003757Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:46:16.838875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.906925Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.951610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18361, MsgBus: 4290 2026-01-07T17:46:14.367229Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677436097752329:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.367286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.415322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:14.688914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.729203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.729319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.796851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677436097752307:2081] 1767807974366445 != 1767807974366448 2026-01-07T17:46:14.798426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.804370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.875270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:14.973576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.973615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.973625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.973703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:15.387987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.504483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.588010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.792494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.974205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.053212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.754113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448982656069:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.754256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.754640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448982656079:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.754706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.030944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.062678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.093258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.126669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.156781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.198423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.234430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.281198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.375060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453277624249:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.375126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.375186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453277624254:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.375314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453277624256:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.375338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.379240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.390382Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677453277624258:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.495717Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677453277624311:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.369032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677436097752329:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.369102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2500, MsgBus: 20682 2026-01-07T17:46:14.410231Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677436407282777:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.411926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.701087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.721666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.721780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.764041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.812520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.817095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677436407282618:2081] 1767807974390084 != 1767807974390087 2026-01-07T17:46:14.956404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.956427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.956433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.956492Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:14.985750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.381102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.411950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.454332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.657342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.828897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.920576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.839104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677449292186379:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.839218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.839619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677449292186389:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.839674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.158308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.203655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.230579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.264702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.300052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.342913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.407998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.482808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.562842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453587154569:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.562935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.562937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453587154574:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.563096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453587154576:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.563132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.567552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.580975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677453587154577:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.660675Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677453587154629:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.409335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677436407282777:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.409409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4059, MsgBus: 19490 2026-01-07T17:46:14.518143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677434562953972:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.545904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.849160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.915101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.915223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.916958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.993102Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677434562953917:2081] 1767807974511364 != 1767807974511367 2026-01-07T17:46:14.997218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:15.054318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.088317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:15.088359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:15.088373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:15.088475Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:15.548954Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.575164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.587941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:15.667868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.873568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.036484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.114351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.855583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677447447857693:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.855700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.857070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677447447857703:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.857133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.149153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.186673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.214342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.243010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.270939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.303293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.373620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.441863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.502590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677451742825875:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.502681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.502924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677451742825880:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.502947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677451742825881:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.502978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.506270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.514857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677451742825884:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.577145Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677451742825937:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.525090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677434562953972:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.525166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessBadTablets >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22832, MsgBus: 25727 2026-01-07T17:46:14.337037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677437776135059:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.341262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.379662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:14.661753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.661885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.671500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.745765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.807811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.820499Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677437776134955:2081] 1767807974328410 != 1767807974328413 2026-01-07T17:46:14.956300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:14.956808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.956822Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.956828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.956944Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:15.345308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.442855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.452619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:15.515955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.682663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.855208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.948704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.961088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677450661038732:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.961236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.962141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677450661038742:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.962208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.282915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.313908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.350240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.390895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.421961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.456636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.493039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.537271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.609732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454956006908:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.609908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.609928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454956006913:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.610390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454956006915:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.610460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.614320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.626687Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677454956006916:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.714816Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677454956006968:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.335368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677437776135059:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.335455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14096, MsgBus: 6880 2026-01-07T17:46:14.406560Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677435285254711:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.407068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.722294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.722420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.738619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.781146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.811816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:15.015808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.036879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:15.036901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:15.036942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:15.037019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:15.416890Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.525242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.561300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:15.647867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.817939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.971954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:16.053717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.964960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448170158424:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.965493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.965887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448170158434:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.965951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.270350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.299914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.329897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.363086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.398028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.439998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.484163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.530858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.610608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452465126599:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.610688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.610878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452465126604:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.610913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452465126606:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.611153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.614209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.630485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677452465126608:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:18.692807Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677452465126659:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.409005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677435285254711:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.409083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> TraverseColumnShard::TraverseServerlessColumnTable >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-07T17:44:43.057084Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1767807883057053 2026-01-07T17:44:43.311095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677043502378598:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:43.311195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:43.359309Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:43.359800Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677043321919931:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:43.359972Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:43.366978Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: 
Direct read cache: : Created 2026-01-07T17:44:43.509364Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:43.563767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:43.605325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:43.605481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:43.607419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:43.607514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:43.617585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:43.618891Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:44:43.632662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:43.654184Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:43.752796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003849/r3tmp/yandexhQoQ0m.tmp 2026-01-07T17:44:43.752821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003849/r3tmp/yandexhQoQ0m.tmp 2026-01-07T17:44:43.757636Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:43.758278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003849/r3tmp/yandexhQoQ0m.tmp 2026-01-07T17:44:43.758468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:43.758599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:43.782421Z INFO: TTestServer started on Port 5733 GrpcPort 29166 PQClient connected to localhost:29166 2026-01-07T17:44:43.932545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:44.325320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:44.364245Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:46.248332Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677056206822055:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.249036Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677056206822044:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.249103Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:46.258760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:46.274441Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677056206822058:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:44:46.386187Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677056206822097:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:46.704584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677056387281891:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:46.705351Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677056206822112:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:46.705817Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjcyMDQ0NWItZjM3MjMyNDgtZjAyOTg5MDUtMTdiYmFlYjQ=, ActorId: [2:7592677056206822042:2300], ActorState: ExecuteState, LegacyTraceId: 01kecrz8x5fksc9bgkecb304ze, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:46.707510Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjNlNmY2MjMtZGI0YjQ0OTUtN2JlODQwYWUtNDcyODIxMGY=, ActorId: [1:7592677056387281842:2329], ActorState: ExecuteState, LegacyTraceId: 01kecrz9737awm7zbt6pgg527g, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:46.708321Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:44:46.708323Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:44:46.740159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:46.852077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:46.945391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057 ... ie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-07T17:46:19.785507Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid b0f59acb-af8c72c8-9f67c6b1-ade68a89 has messages 1 2026-01-07T17:46:19.785634Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 read done: guid# b0f59acb-af8c72c8-9f67c6b1-ade68a89, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2026-01-07T17:46:19.785665Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 response to read: guid# b0f59acb-af8c72c8-9f67c6b1-ade68a89 2026-01-07T17:46:19.785914Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 Process answer. Aval parts: 0 2026-01-07T17:46:19.786584Z :DEBUG: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:46:19.786615Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:19.786640Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.786657Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:19.786676Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.786891Z :DEBUG: [/Root] Decompression task done. 
Partition/PartitionSessionId: 0 (0-3) 2026-01-07T17:46:19.786691Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:19.787324Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 grpc read done: success# 1, data# { read { } } 2026-01-07T17:46:19.787442Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 got read request: guid# 992915f3-e59642f8-b1c1d22b-2a2032ca 2026-01-07T17:46:19.787892Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T17:46:19.787962Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-07T17:46:19.787998Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-07T17:46:19.788028Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-07T17:46:19.788076Z :DEBUG: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-01-07T17:46:19.788304Z :INFO: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] Closing read session. Close timeout: 0.000000s 2026-01-07T17:46:19.788381Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-07T17:46:19.788434Z :INFO: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 37 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:46:19.788540Z :NOTICE: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:46:19.788591Z :DEBUG: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] [null] Abort session to cluster 2026-01-07T17:46:19.789128Z :NOTICE: [/Root] [/Root] [39dff04a-9a6730a5-de40abdb-14c7294b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:46:19.789336Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0] Write session: close. Timeout = 0 ms 2026-01-07T17:46:19.789377Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0] Write session will now close 2026-01-07T17:46:19.789425Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0] Write session: aborting 2026-01-07T17:46:19.789788Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:46:19.789830Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0] Write session: destroy 2026-01-07T17:46:19.790630Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 grpc read done: success# 0, data# { } 2026-01-07T17:46:19.790654Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 grpc read failed 2026-01-07T17:46:19.790683Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 grpc closed 2026-01-07T17:46:19.790717Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_2871028360951358896_v1 is DEAD 2026-01-07T17:46:19.791498Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0 grpc read done: success: 0 data: 2026-01-07T17:46:19.791515Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0 grpc read failed 2026-01-07T17:46:19.791542Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0 grpc closed 2026-01-07T17:46:19.791578Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|1096080b-9b18aded-48d0a2d9-8105036c_0 is DEAD 2026-01-07T17:46:19.792457Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:46:19.792693Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592677456462441387:2538] disconnected. 
2026-01-07T17:46:19.792727Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592677456462441387:2538] disconnected; active server actors: 1 2026-01-07T17:46:19.792748Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7592677456462441387:2538] client user disconnected session shared/user_11_1_2871028360951358896_v1 2026-01-07T17:46:19.793331Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7592677430692637324:2481] destroyed 2026-01-07T17:46:19.793381Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_2871028360951358896_v1 2026-01-07T17:46:19.793403Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7592677456462441390:2543] destroyed 2026-01-07T17:46:19.793435Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:46:19.793473Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:19.793490Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.793506Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:19.793528Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.793545Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:19.793608Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_2871028360951358896_v1 2026-01-07T17:46:19.887017Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:19.887058Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.887073Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:19.887097Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.887122Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:19.987391Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:46:19.987428Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.987442Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:46:19.987467Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:46:19.987480Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:46:20.245280Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7592677460757408715:2539] TxId: 281474976715683. Ctx: { TraceId: 01kecs247j78syza65nsr0q59e, Database: /Root, SessionId: ydb://session/3?node_id=11&id=ZjBhNzRhZDUtYWE0OTg3NzgtMmU2OTcwYWUtNmQyNGU5OWU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-07T17:46:20.245444Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7592677460757408719:2539], TxId: 281474976715683, task: 3. Ctx: { CheckpointId : . TraceId : 01kecs247j78syza65nsr0q59e. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=ZjBhNzRhZDUtYWE0OTg3NzgtMmU2OTcwYWUtNmQyNGU5OWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [11:7592677460757408715:2539], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |92.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3655, MsgBus: 9859 2026-01-07T17:46:16.014429Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677443285763198:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:16.014507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:16.371545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:16.380572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.380671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:16.434712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:16.485929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677438990795754:2081] 1767807976006860 != 1767807976006863 2026-01-07T17:46:16.486323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:16.566512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:16.566539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:16.566546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:16.566629Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:16.626107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:17.003762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:17.049882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:17.085595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.265594Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.424370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.509083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.415837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677456170666816:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.415993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.416458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677456170666826:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.416548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.830022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.866076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.907477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.942485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.976290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.037577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.076760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.127308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.220713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677460465634996:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.220792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.221088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677460465635002:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.221381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677460465635001:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.221419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.224813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:20.238227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677460465635005:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:20.338382Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677460465635056:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:21.015012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677443285763198:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:21.015110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> THealthCheckTest::BridgeGroupDegradedInBothPiles [GOOD] >> THealthCheckTest::BridgeGroupDegradedInOnePile >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> AnalyzeColumnshard::AnalyzeMultiOperationId |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28259, MsgBus: 14351 2026-01-07T17:46:10.174849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677417159074057:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:10.174928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:10.386116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:10.417081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:10.417202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:46:10.444414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:10.563932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677417159074028:2081] 1767807970173063 != 1767807970173066 2026-01-07T17:46:10.570051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:10.586726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:10.586754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:10.586764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:10.586855Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:10.675767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:10.986989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:11.043990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.183283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:11.197090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.341588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.412735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.392770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677430043977792:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.392899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.393184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677430043977802:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.393253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:13.707130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.738231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.768918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.806323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.837293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.879542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.929147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:13.970335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:14.040370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677434338945971:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.040464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.040534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677434338945976:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.040564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677434338945978:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.040617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:14.043968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:14.054585Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677434338945980:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:14.126698Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677434338946031:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:15.175480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677417159074057:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:15.175550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14819, MsgBus: 23548 2026-01-07T17:46:17.035099Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677450652310908:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:17.035147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:17.062239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:17.144163Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:17.146810Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677450652310875:2081] 1767807977033654 != 1767807977033657 2026-01-07T17:46:17.179319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:17.179408Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:17.194487Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:17.224396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:17.257081Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:17.257104Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:17.257112Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:17.257182Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:17.632108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:46:17.638790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:17.651919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.720681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.919918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.994241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.149850Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:20.233371Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677463537214647:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.233440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.233824Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677463537214657:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.233865Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.305915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.341855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.370943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.405894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.440662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.479282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.545767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.601972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.712673Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677463537215528:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.712768Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.712949Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677463537215533:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.713020Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677463537215534:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.713106Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.717232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:20.735282Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677463537215537:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:20.820649Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677463537215588:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:22.035349Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677450652310908:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:22.035433Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> AnalyzeColumnshard::AnalyzeEmptyTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17426, MsgBus: 23626 2026-01-07T17:46:16.314235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677444475647496:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:16.314397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:16.580469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:16.600272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.600396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:16.616526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:16.695026Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:16.701134Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677444475647448:2081] 1767807976295814 != 1767807976295817 2026-01-07T17:46:16.784495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:16.806078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:16.806107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:16.806115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:46:16.806226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:17.268862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:17.324718Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:17.328776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.443480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.587983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.666942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:19.878067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677457360551203:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.878188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.878704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677457360551213:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:19.878759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.180005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.215549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.244466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.284666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.317565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.353498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.389636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.430493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:20.502311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677461655519379:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.502387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.502462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677461655519384:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.502518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677461655519386:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.502568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:20.505907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:20.516241Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677461655519388:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:20.600520Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677461655519439:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:21.313652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677444475647496:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:21.313705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:135:2157] sender: [1:138:2057] recipient: [1:112:2143] 2026-01-07T17:45:46.549178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.549239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.553190Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.567719Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.568078Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.568353Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.618321Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.625931Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.626035Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.627833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.627909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.627965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.628245Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.628312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.628384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:204:2157] in generation 2 Leader for TabletID 9437184 is [1:135:2157] sender: [1:214:2057] recipient: [1:14:2061] 2026-01-07T17:45:46.721067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.760721Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.761124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.761274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.761332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.761378Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.761433Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.761706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.761754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.762100Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.762197Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.762248Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.762292Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.762338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.762375Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.762429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.762464Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.762508Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.762616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:215:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.762674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.762715Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:215:2214], sessionId# [0:0:0] 2026-01-07T17:45:46.772895Z node 
1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.773000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.773109Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.773341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.773422Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.773477Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.773620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.773670Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.773714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.773752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.774127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.774174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.774219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.774254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.774316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.774345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.774383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.774418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.774447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.794501Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.794620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.794660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 
2026-01-07T17:45:46.794715Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.794795Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.795646Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.795704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.795749Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.795909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.795950Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.796099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.796142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-07T17:45:46.796178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.796216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.799758Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.799838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.800076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.800121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.800182Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.800224Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.800264Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... 
rd_impl.h:3153: StateWork, received event# 269877761, Sender [23:298:2279], Recipient [23:239:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:24.817307Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:24.817403Z node 23 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [23:297:2278], serverId# [23:298:2279], sessionId# [0:0:0] 2026-01-07T17:46:24.817656Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [23:103:2137], Recipient [23:239:2231]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 98784249945 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-07T17:46:24.817708Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:46:24.818157Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:24.819141Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-07T17:46:24.819219Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:46:24.819263Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-07T17:46:24.819303Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:46:24.819342Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:46:24.819378Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:24.819437Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-07T17:46:24.819481Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:46:24.819508Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:46:24.819535Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:46:24.819564Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 
9437184 on unit BlockFailPoint 2026-01-07T17:46:24.819591Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:46:24.819616Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:46:24.819640Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:46:24.819667Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:24.819718Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:24.819776Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:2] at 9437184 requested 132390 more memory 2026-01-07T17:46:24.819815Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-07T17:46:24.820099Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:24.820145Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:24.820192Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:24.821047Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 132518 and requests 1060144 more for the next try 2026-01-07T17:46:24.821198Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-07T17:46:24.821241Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-07T17:46:24.821437Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:24.821476Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:24.822200Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-07T17:46:24.822246Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:24.822671Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 1192662 and requests 9541296 more for the next try 2026-01-07T17:46:24.822758Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-07T17:46:24.822786Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-07T17:46:24.822925Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:24.822957Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:24.823390Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 
9437184 restored its data 2026-01-07T17:46:24.823426Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:24.823771Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 10733958 and requests 85871664 more for the next try 2026-01-07T17:46:24.823845Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-07T17:46:24.823874Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-07T17:46:24.824013Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:46:24.824046Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-07T17:46:24.824551Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-07T17:46:24.824601Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:46:25.069689Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-07T17:46:25.069816Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:46:25.069911Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:46:25.069956Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:46:25.070006Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-07T17:46:25.070057Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-07T17:46:25.070166Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:46:25.070205Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-07T17:46:25.070258Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-07T17:46:25.070307Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-07T17:46:25.070360Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-07T17:46:25.070398Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-07T17:46:25.070446Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-07T17:46:25.085994Z node 23 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:46:25.086082Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-07T17:46:25.086142Z node 23 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-07T17:46:25.086245Z node 23 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:46:25.087347Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [23:303:2284], Recipient [23:239:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:25.087419Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:46:25.087475Z node 23 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [23:302:2283], serverId# [23:303:2284], sessionId# [0:0:0] 2026-01-07T17:46:25.087666Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830214, Sender [23:301:2282], Recipient [23:239:2231]: NKikimrTabletBase.TEvGetCounters |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TStreamingQueryTest::AlterStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryOrReplace >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> ColumnShardTiers::TieringUsage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> TSchemeShardTest::CreateTable >> TSchemeShardTest::RmDirTwice >> TSchemeShardTest::CreateAlterTableWithCodec >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> TSchemeShardTest::InitRootAgain >> TStreamingQueryTest::AlterStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:32.819655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:32.915958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:32.929271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:32.929734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:32.929805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:33.233002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:33.233159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:33.324037Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807810388702 != 1767807810388706 2026-01-07T17:43:33.340466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:33.383871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:33.481358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:33.942012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:33.943487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:43:33.943555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:43:33.943587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:43:33.943836Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:43:33.957631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-07T17:43:44.438432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.438688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.439439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.439512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.443093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:44.783990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1018:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.784256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.784736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.784802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.784978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1025:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:43:44.790631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:43:45.147017Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1027:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:43:45.223901Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1118:2918] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:43:45.894931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:46.372479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:47.363929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:48.459375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:49.345972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:43:50.630743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:43:51.853881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", 
AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-07T17:44:09.388570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA S ... b_bytes=132224;raw_bytes=3757338;count=3;records=3111} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037892 2026-01-07T17:46:26.108489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.108720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadBlobs;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.109033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-07T17:46:26.109370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.112318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.112651Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.118025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2982:4356];task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;task_class=CS::GENERAL;fline=general_compaction.cpp:138;event=blobs_created_diff;appended=0;;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:192];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:192:232];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:424:256];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:680:192];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:872:264];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1520:192];;;;switched=(portion_id:20;path_id:1000000894;records_count:1;schema_version:2;level:0;cs:plan_step=1759169711500;tx_id=18446744073709551615;;wi:10;;column_size:1328;index_size:0;meta:(()););(portion_id:19;path_id:1000000894;records_count:1;schema_version:2;level:0;;column_size:1712;index_size:0;meta:(()););; 2026-01-07T17:46:26.118103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2982:4356];task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2026-01-07T17:46:26.118152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2982:4356];task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.118326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2026-01-07T17:46:26.118425Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:63;event=Limiter; 2026-01-07T17:46:26.118460Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=AskDiskQuota;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.118498Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Writing;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.118830Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 72075186224037892 2026-01-07T17:46:26.118954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=70;sum=2726;count=57; 2026-01-07T17:46:26.119000Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:66;memory_size=190;data_size=182;sum=5510;count=58;size_of_meta=112; 2026-01-07T17:46:26.119045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:44;memory_size=270;data_size=262;sum=7830;count=29;size_of_portion=192; 2026-01-07T17:46:26.119111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.119259Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[49] (CS::GENERAL) apply at tablet 72075186224037892 2026-01-07T17:46:26.120006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2982:4356];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037892;external_task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; 2026-01-07T17:46:26.120054Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 72075186224037892 Save Batch GenStep: 1:17 Blob count: 1 2026-01-07T17:46:26.120170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=132224;raw_bytes=3757338;count=3;records=3111} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037892 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=51;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=53;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=54;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=50;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=52;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=c9ee53fc-ebf011f0-ab333102-6f7eb475; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=560;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=6240;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=5680;delta=560; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=c9eed886-ebf011f0-ac3f8e90-abd7db68; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 
:Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> THealthCheckTest::ServerlessBadTablets [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> TSchemeShardCheckProposeSize::CopyTable >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> IncrementalBackup::ComplexBackupSequenceWithDataVerification [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19811, MsgBus: 28923 2026-01-07T17:46:14.206852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677436090178079:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.206919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.455927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.456016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.473408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.535696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.609712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677436090178051:2081] 1767807974204907 != 1767807974204910 2026-01-07T17:46:14.617301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.694979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.694999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.695007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.695088Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:14.810585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.207248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.220508Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.272908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.426587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.586605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.683915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.701853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448975081835:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.702001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.709116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677448975081845:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.709256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.033363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.062830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.095634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.132788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.169994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.208185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.244223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.325411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.399053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453270050021:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.399121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453270050026:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.399152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.399347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677453270050028:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.399380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.403035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.412669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677453270050029:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.512260Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677453270050081:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.206265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677436090178079:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.206333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10804, MsgBus: 9052 2026-01-07T17:46:21.329765Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677466033965297:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:21.329824Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:21.350384Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:21.413273Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:21.459705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:21.459795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:21.463029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:21.519754Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:21.537598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:21.537634Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:21.537643Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:21.537716Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:21.921238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:21.929506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-07T17:46:21.942137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.021157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.180959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.245126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.364974Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:24.674072Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677478918869015:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:24.674161Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:24.674741Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677478918869025:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:24.674785Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:24.726693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.759008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.793501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.824483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.855435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.888436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.926657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.974456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.060975Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483213837186:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.061075Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.061132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483213837191:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.061419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483213837194:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.061486Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.064838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:25.075779Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677483213837193:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:25.178581Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677483213837246:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:26.330275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677466033965297:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:26.330345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CreateAlterTableWithCacheMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:26.752149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:26.752251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:26.752305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:26.752347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:26.752381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:26.752408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:26.752478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:26.752562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:26.753419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:26.753731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:26.839831Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:26.839889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:26.854155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:26.854439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:26.854624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:26.860619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:26.860921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:26.861572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:26.861857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:26.864307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:26.864489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:26.865633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:26.865687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:26.865793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:26.865870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:26.865910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:26.866076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:26.986069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.986913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987019Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:26.987899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:46:28.563381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:46:28.563634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:46:28.563674Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:46:28.563759Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:46:28.563812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:46:28.563857Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:46:28.563885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:46:28.563917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:46:28.563952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:46:28.563988Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:46:28.564016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:46:28.564077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:46:28.564111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:46:28.564142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:46:28.564169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-07T17:46:28.564800Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.564896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.564948Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:46:28.564982Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:46:28.565023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:46:28.565541Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.565623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.565654Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:46:28.565680Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:46:28.565709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:46:28.565769Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:46:28.568672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:46:28.568900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:46:28.569122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:46:28.569160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:46:28.569453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:46:28.569527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:46:28.569560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:712:2701] TestWaitNotification: OK eventTxId 101 2026-01-07T17:46:28.569910Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:46:28.570072Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/UniqueName" took 197us result status StatusSuccess 2026-01-07T17:46:28.570425Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:46:28.573481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "UniqueName" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:46:28.573645Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-07T17:46:28.573773Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, at schemeshard: 72057594046678944 2026-01-07T17:46:28.575713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2026-01-07T17:46:28.575903Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, operation: ALTER STREAMING QUERY, path: UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:46:28.576165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:46:28.576204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:46:28.576550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:46:28.576619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:46:28.576654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:720:2709] TestWaitNotification: OK eventTxId 102 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:26.823914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:26.824021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:26.824065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:26.824105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:26.824140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:26.824168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:26.824239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:26.824321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:26.825240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:26.825539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:26.914544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:26.914632Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:26.930248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:26.931226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:26.931475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:26.937850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:26.938096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:26.938706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:26.938931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:26.941260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:26.941422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:26.942377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:26.942422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:26.942503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:26.942545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:26.942580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:26.942716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.082358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:46:27.083280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.083957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.084021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.084080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.084195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.084316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.084408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.825452Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:46:28.825487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:46:28.825527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:46:28.826165Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.826252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:46:28.826287Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:46:28.826312Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:46:28.826339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:46:28.826404Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:46:28.829881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:46:28.830121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:46:28.830309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:46:28.830338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:46:28.830583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:46:28.830683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:46:28.830716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:712:2701] TestWaitNotification: OK eventTxId 101 2026-01-07T17:46:28.831088Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:46:28.831302Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 241us result status StatusSuccess 2026-01-07T17:46:28.831710Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-07T17:46:28.834057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:46:28.834367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 102:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } 2026-01-07T17:46:28.834434Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 102:0, path# /MyRoot/MyStreamingQuery 2026-01-07T17:46:28.834522Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:46:28.840583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:46:28.840803Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: MyStreamingQuery TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:46:28.841134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:46:28.841183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:46:28.841533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:46:28.841625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:46:28.841658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:720:2709] TestWaitNotification: OK eventTxId 102 2026-01-07T17:46:28.842056Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:46:28.842192Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 159us result status StatusSuccess 2026-01-07T17:46:28.842501Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64402, MsgBus: 25824 2026-01-07T17:46:14.308002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677437508173011:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.308109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.581487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.583125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.672093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.682015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.705194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.829254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.829279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.829289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.829391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-07T17:46:14.859296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.308294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.313766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.378086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.555267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.724026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.797127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.687848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677450393076641:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.687997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.688680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677450393076651:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.688749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.073284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.103603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.138641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.165916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.193107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.227237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.258091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.314267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.401670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454688044820:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.401751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.402162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454688044825:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.402223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677454688044826:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.402275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.406289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.418844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677454688044829:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.490164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677454688044880:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.307287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677437508173011:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.307371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6392, MsgBus: 2393 2026-01-07T17:46:21.903802Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677465836848085:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:21.903863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:21.973019Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:22.006859Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.012417Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677465836848043:2081] 1767807981901228 != 1767807981901231 2026-01-07T17:46:22.046778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.046863Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.057179Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.237005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:22.245728Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:22.245751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:22.245758Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:22.245829Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:22.617114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:46:22.626079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.679913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.806883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.874578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:22.991421Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:25.343402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483016719098:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.343519Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.343827Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483016719107:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.343874Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.419624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.453238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.482131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.512402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.551708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.604767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.678942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.728882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:25.817089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483016719984:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.817202Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.817417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483016719989:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.817451Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677483016719990:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.817492Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:25.821746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:25.835250Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677483016719993:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:25.932891Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677483016720044:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:26.904265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677465836848085:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:26.904377Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> TSchemeShardTest::CreateAlterTableWithCacheMode [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::TestOnlyRequestNeededTablets >> TSchemeShardTest::Boot >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> YdbProxy::DropTable >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> YdbProxy::MakeDirectory >> YdbProxy::RemoveDirectory >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> 
TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> AnalyzeColumnshard::AnalyzeShard [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16229, MsgBus: 64485 2026-01-07T17:46:14.136885Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677435031434427:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:14.138519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:14.169316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:14.425098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677435031434363:2081] 1767807974128384 != 1767807974128387 2026-01-07T17:46:14.432855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:14.442963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:14.443077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:14.446852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:14.489598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:14.637477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.637501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.637507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.637588Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:14.676591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:46:15.127501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:15.133246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:15.143393Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:15.177465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.308705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.480405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:15.560547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.537738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677447916338129:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.537853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.538190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677447916338139:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.538295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:17.884260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.911162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.942961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:17.972328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.000220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.031485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.060791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.108992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:18.182213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452211306311:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.182296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.182383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452211306316:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.182471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677452211306318:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.182509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:18.188688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:18.200015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677452211306320:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:46:18.294475Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677452211306371:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:19.138784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677435031434427:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:19.141754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... , (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:23.368220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:23.369080Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677475806661503:2081] 1767807983230543 != 1767807983230546 2026-01-07T17:46:23.384675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:23.449535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:23.449562Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:23.449572Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:23.449650Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:23.475683Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:23.872498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:23.878027Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:23.890660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:23.942283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.139403Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.212057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:24.364297Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:26.732878Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677488691565253:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:26.732981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:26.733215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677488691565263:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:26.733259Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:26.799882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.832891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.862287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.892903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.922400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.956884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:26.994549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:27.042315Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:27.120688Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677492986533426:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:27.120792Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:27.120808Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677492986533431:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:27.121094Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677492986533433:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:27.121137Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:27.124420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:27.135163Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677492986533434:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:46:27.230270Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677492986533486:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:28.232168Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677475806661530:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:28.232242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:46:28.733660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:28.765966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:28.810189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> THealthCheckTest::BridgeGroupDegradedInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeShard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:21.667198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.754666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.761622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.761969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.762092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.274341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.378517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.378646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.415153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.490735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:23.225313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:23.227848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:23.227914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:23.227949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:23.228180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:23.297051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:23.916870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:27.124391Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:27.132552Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:27.168586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:27.168711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:46:27.198034Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:27.200013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:27.394816Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.395458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.396313Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.396712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.396817Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.396994Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.397201Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.397364Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.397507Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:27.563792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:46:27.575587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:27.575701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:27.589219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:27.729379Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.782205Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:27.782357Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:27.811826Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:27.813308Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:27.813541Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:27.813603Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:27.813660Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:27.813718Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:27.813779Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: 
[72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:27.813831Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:27.814689Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:27.939684Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:27.939771Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2204:2607], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:27.953920Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2230:2630] 2026-01-07T17:46:27.954510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2230:2630], schemeshard id = 72075186224037897 2026-01-07T17:46:28.068052Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2350:2695] 2026-01-07T17:46:28.068593Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:46:28.070733Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Describe result: PathErrorUnknown 2026-01-07T17:46:28.070780Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Creating table 2026-01-07T17:46:28.070862Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:46:28.074066Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2369:2702], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:28.077635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:28.083645Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:28.083760Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:28.095446Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:28.248146Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:28.656184Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Request: create. Transaction completed: 281474976720657. 
Doublechecking... 2026-01-07T17:46:28.765043Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:28.765144Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2352:2697] Owner: [2:2351:2696]. Column diff is empty, finishing 2026-01-07T17:46:29.396706Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:29.644205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:3385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:29.644417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:29.644855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2817:3390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:29.644945Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:29.663284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:46:29.772980Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:46:29.773259Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:46:29.773568Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:46:29.773724Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:46:29.773852Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:46:29.774012Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:46:29.774138Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:46:29.774263Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:46:29.774412Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:46:29.774566Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:46:29.774694Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:46:29.774825Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:46:29.774950Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:46:29.801585Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:46:29.801688Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:46:29.801805Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:46:29.801835Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:46:29.801995Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:46:29.802029Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:46:29.802110Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:46:29.802165Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:46:29.802221Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:46:29.802258Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:46:29.802296Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:46:29.802321Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:46:29.802517Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:46:29.802558Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:46:29.802665Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:46:29.802695Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:46:29.802743Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:46:29.802790Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:46:29.802842Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:46:29.802878Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:46:29.803001Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:46:29.803039Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-07T17:46:29.803163Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-07T17:46:29.803218Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-07T17:46:29.880815Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2875:3046];ev=NActors::IEventHandle;tablet_id=72075186224037899;tx_id=281474976715659;this=137107594697504;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=2969;max=18446744073709551615;plan=0;src=[2:1946:2452];cookie=121:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:46:29.917097Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:46:29.917230Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-07T17:46:29.917308Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000016s |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> YdbProxy::OAuthToken [GOOD] >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableById >> YdbProxy::DescribeTopic [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true >> YdbProxy::StaticCreds [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2026-01-07T17:46:32.133957Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677512421277014:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:32.134455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:32.389468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:32.389559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:32.440601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:32.450614Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677512421276904:2081] 1767807992126318 != 1767807992126321 2026-01-07T17:46:32.467780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:32.473710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:32.611842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:32.688050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.688104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.688112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.688194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:32.924281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:32.930622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:35.208220Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677524750707673:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:35.208381Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:35.213643Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:35.309582Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:35.314391Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677524750707522:2081] 1767807995203348 != 1767807995203351 2026-01-07T17:46:35.318473Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:35.346750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:35.347004Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:35.353775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:35.541108Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:35.591931Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:35.591964Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:35.591972Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:35.592059Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:35.850922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2026-01-07T17:46:32.148203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677512700381755:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:32.150168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:32.375137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:32.375254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:32.451867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:32.481775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:32.484417Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677512700381694:2081] 1767807992131432 != 1767807992131435 2026-01-07T17:46:32.484591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:32.685803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:32.707268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.707293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.707302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.707410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:32.965631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:33.017394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 
2026-01-07T17:46:33.080616Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677516995349868:2523] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-07T17:46:33.154714Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:35.303336Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677525910292613:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:35.303534Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:35.354376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:35.431212Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:35.432467Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:35.432540Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:35.474972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:35.594080Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:35.702638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:35.702659Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:35.702667Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:35.702752Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:35.924512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:35.961942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2026-01-07T17:46:31.642189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677507411507019:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:31.642352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:31.852594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:31.877720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.877847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.924957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.950061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:32.148618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:32.149737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.149756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.149799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.149930Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:32.399716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:32.406575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:46:32.655796Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:34.560155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:34.710910Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:46:34.715196Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677520296409883:2598] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-07T17:46:35.309929Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677525866317623:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:35.310335Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:35.333313Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:35.437485Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:35.437565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:35.437780Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:35.440059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:35.475614Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:35.607235Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:35.704590Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:35.704609Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:35.704615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:35.704671Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:35.948149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit800 >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::StorageNoQuota >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2026-01-07T17:46:35.891508Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677526085086393:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:35.891612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:36.127077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:36.127158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:36.133454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:36.154032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:36.178097Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:36.313191Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677526085086529:2143] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:36.313280Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677530380054264:2445] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:36.313486Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677526085086536:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:36.313626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677530380054070:2306][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677526085086536:2146], cookie# 1 2026-01-07T17:46:36.320638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677530380054123:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054120:2306], cookie# 1 2026-01-07T17:46:36.320725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677530380054124:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054121:2306], cookie# 1 2026-01-07T17:46:36.320773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677530380054125:2306][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054122:2306], cookie# 1 2026-01-07T17:46:36.320818Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677526085086178:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054123:2306], cookie# 1 2026-01-07T17:46:36.320829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677526085086181:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054124:2306], cookie# 1 2026-01-07T17:46:36.320854Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677526085086184:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677530380054125:2306], cookie# 1 2026-01-07T17:46:36.320904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677530380054123:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677526085086178:2051], cookie# 1 2026-01-07T17:46:36.320955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677530380054124:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677526085086181:2054], cookie# 1 2026-01-07T17:46:36.320981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677530380054125:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677526085086184:2057], cookie# 1 2026-01-07T17:46:36.321038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677530380054070:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677530380054120:2306], cookie# 1 2026-01-07T17:46:36.321069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677530380054070:2306][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:36.321091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677530380054070:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677530380054121:2306], cookie# 1 2026-01-07T17:46:36.321116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677530380054070:2306][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:36.321163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677530380054070:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677530380054122:2306], cookie# 1 2026-01-07T17:46:36.321200Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677530380054070:2306][/dc-1] Sync cookie mismatch: sender# [1:7592677530380054122:2306], cookie# 1, current cookie# 0 2026-01-07T17:46:36.321208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677526085086536:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:36.325669Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677526085086536:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: 
Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677530380054070:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:36.325786Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677526085086536:2146], cacheItem# { Subscriber: { Subscriber: [1:7592677530380054070:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:36.327793Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677530380054265:2446], recipient# [1:7592677530380054264:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:36.327861Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677530380054264:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:36.353973Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677530380054264:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:36.356866Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677530380054264:2445] Handle TEvDescribeSchemeResult Forward to# [1:7592677530380054263:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:46:36.358128Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677526085086529:2143] Handle TEvProposeTransaction 2026-01-07T17:46:36.358153Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677526085086529:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:46:36.358237Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677526085086529:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677530380054270:2450] 2026-01-07T17:46:36.441274Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677530380054270:2450] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:46:36.441333Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677530380054270:2450] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:46:36.441421Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677530380054270:2450] txid# 2814749767156 ... 
HEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:46:37.655351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:46:37.655458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-07T17:46:37.655490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-07T17:46:37.655509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-07T17:46:37.658466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:46:37.658947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2026-01-07T17:46:37.659295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-07T17:46:37.659538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2026-01-07T17:46:37.659739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:46:37.659869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2026-01-07T17:46:37.659975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2026-01-07T17:46:37.660146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2026-01-07T17:46:37.660290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2026-01-07T17:46:37.660441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2026-01-07T17:46:37.660613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2026-01-07T17:46:37.660796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:46:37.660819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2026-01-07T17:46:37.660852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-01-07T17:46:37.661040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:46:37.661052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-07T17:46:37.661180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2026-01-07T17:46:37.681715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:46:37.681757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:46:37.681816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-07T17:46:37.681825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-07T17:46:37.681843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:46:37.681856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:46:37.681879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T17:46:37.681887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 
2026-01-07T17:46:37.681906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-07T17:46:37.681916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-07T17:46:37.681945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:46:37.681953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:46:37.681969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2026-01-07T17:46:37.681980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-01-07T17:46:37.682023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-07T17:46:37.682049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-07T17:46:37.682081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2026-01-07T17:46:37.682152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:46:37.682186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:46:37.682213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-07T17:46:37.682288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:46:37.692108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:46:38.045705Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677534320534648:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:38.045944Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677538615502281:2309], recipient# [3:7592677538615502280:2306], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:38.056298Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:39.049074Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677534320534648:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:39.049262Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677542910469579:2310], recipient# [3:7592677542910469578:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TStorageTenantTest::DeclareAndDefine >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsNoLimit >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> TStorageTenantTest::GenericCases >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> THealthCheckTest::BridgeGroupDeadInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> THealthCheckTest::BridgeTimeDifference >> THealthCheckTest::TestOnlyRequestNeededTablets [GOOD] >> THealthCheckTest::TestNodeDisconnected >> 
TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:27.650022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:27.650097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.650129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:27.650158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:27.650190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:27.650211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:27.650262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.650327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:27.651053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:27.651303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:27.740636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:27.740702Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.757075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:27.757519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:27.757720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:27.764981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:27.765323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:27.766053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:27.766289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:27.768625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.768799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:27.769734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:27.769780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.769883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:27.769926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:27.769958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:27.770155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.899906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.900965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:46:27.901675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.901983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.902040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2, partId: 0 2026-01-07T17:46:45.384975Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-07T17:46:45.385037Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-07T17:46:45.385080Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.385111Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:46:45.385264Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:46:45.385484Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:46:45.390558Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.390680Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.390743Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.390802Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.390858Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.393951Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.394154Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:45.394186Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:46:45.394404Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:45.394447Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [14:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-07T17:46:45.394836Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 
104:0, at schemeshard: 72057594046678944 2026-01-07T17:46:45.394888Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:46:45.395044Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:46:45.395088Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:46:45.395143Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:46:45.395188Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:46:45.395249Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:46:45.395307Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:46:45.395357Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:46:45.395395Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:46:45.395656Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 10 2026-01-07T17:46:45.395725Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-07T17:46:45.395781Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:46:45.396748Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:46:45.396836Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:46:45.396871Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:46:45.396953Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:46:45.397025Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-07T17:46:45.397131Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:46:45.399249Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:46:45.399536Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:46:45.399599Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:46:45.400054Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:46:45.400149Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:46:45.400202Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [14:1429:3253] TestWaitNotification: OK eventTxId 104 2026-01-07T17:46:45.400767Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:46:45.400985Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 227us result status StatusSuccess 2026-01-07T17:46:45.401633Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> TStorageTenantTest::DeclareAndDefine [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TStorageTenantTest::GenericCases [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2026-01-07T17:46:41.901677Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677550247436449:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:41.901719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:42.109371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:42.135230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:42.135349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:42.149162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:42.236503Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:42.270761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:42.413555Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677550247436698:2143] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:42.413654Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677554542404452:2445] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:42.413777Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677550247436705:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:42.413879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677554542404225:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677550247436705:2146], cookie# 1 2026-01-07T17:46:42.415598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677554542404274:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404271:2291], cookie# 1 2026-01-07T17:46:42.415660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677554542404275:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404272:2291], cookie# 1 2026-01-07T17:46:42.415675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677554542404277:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404273:2291], cookie# 1 2026-01-07T17:46:42.415710Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677550247436350:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404275:2291], cookie# 1 2026-01-07T17:46:42.415724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677550247436347:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404274:2291], cookie# 1 2026-01-07T17:46:42.415772Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677550247436353:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677554542404277:2291], cookie# 1 2026-01-07T17:46:42.415830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677554542404275:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677550247436350:2054], cookie# 1 2026-01-07T17:46:42.415894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677554542404274:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677550247436347:2051], cookie# 1 2026-01-07T17:46:42.415914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677554542404277:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677550247436353:2057], cookie# 1 2026-01-07T17:46:42.415980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677554542404225:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# 
[1:7592677554542404272:2291], cookie# 1 2026-01-07T17:46:42.416040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677554542404225:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:42.416083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677554542404225:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677554542404271:2291], cookie# 1 2026-01-07T17:46:42.416112Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677554542404225:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:42.416155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677554542404225:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677554542404273:2291], cookie# 1 2026-01-07T17:46:42.416178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677554542404225:2291][/dc-1] Sync cookie mismatch: sender# [1:7592677554542404273:2291], cookie# 1, current cookie# 0 2026-01-07T17:46:42.416211Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677550247436705:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:42.421270Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677550247436705:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677554542404225:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:42.421372Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677550247436705:2146], cacheItem# { Subscriber: { Subscriber: [1:7592677554542404225:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:42.423595Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677554542404453:2446], recipient# [1:7592677554542404452:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:42.423671Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677554542404452:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:42.454916Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677554542404452:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:42.459320Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677554542404452:2445] Handle TEvDescribeSchemeResult Forward to# [1:7592677554542404451:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:46:42.460812Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677550247436698:2143] Handle TEvProposeTransaction 2026-01-07T17:46:42.460847Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677550247436698:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:46:42.460966Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677550247436698:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677554542404458:2450] 2026-01-07T17:46:42.583085Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677554542404458:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:46:42.583131Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... 
de 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.869085Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677571097330140:2243][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7592677571097330142:2243] 2026-01-07T17:46:45.869120Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.869152Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677571097330140:2243][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7592677571097330143:2243] 2026-01-07T17:46:45.869178Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.931001Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677558212428236:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.931289Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677571097330188:2252], recipient# [3:7592677571097330149:2520], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.931456Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7592677571097330149:2520], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:45.935034Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677558212428236:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.935183Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677571097330204:2253], recipient# [3:7592677571097330200:2539], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.935335Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7592677571097330200:2539], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:45.935496Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:45.944843Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677558212428236:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.945036Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677571097330205:2254], recipient# [3:7592677558212428223:2496], result# { ErrorCount: 3 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.945117Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/USER_0/.metadata/script_executions 2026-01-07T17:46:46.025667Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677558212428236:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.025850Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677575392297502:2255], recipient# [3:7592677571097330200:2539], result# { ErrorCount: 2 
DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.026023Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7592677571097330200:2539], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:46.028866Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677571097330140:2243][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7592677571097330141:2243] 2026-01-07T17:46:46.028968Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:46.029326Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677571097330140:2243][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7592677571097330142:2243] 2026-01-07T17:46:46.029370Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:46.029393Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677571097330140:2243][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7592677571097330143:2243] 2026-01-07T17:46:46.029420Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677571097330140:2243][/dc-1/USER_0] Ignore empty state: owner# [3:7592677558212428236:2239], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2026-01-07T17:46:42.239014Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677557937545896:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:42.239173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:42.469058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:42.500259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:42.500361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:42.506983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:42.561083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles 
were not loaded 2026-01-07T17:46:42.683143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:42.733903Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677557937546110:2117] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:42.733974Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677557937546599:2438] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:42.734103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557937546118:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:42.734206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677557937546378:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677557937546118:2121], cookie# 1 2026-01-07T17:46:42.736082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557937546437:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546434:2292], cookie# 1 2026-01-07T17:46:42.736116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557937546438:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546435:2292], cookie# 1 2026-01-07T17:46:42.736148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557937546439:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546436:2292], cookie# 1 2026-01-07T17:46:42.736198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557937545798:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546437:2292], cookie# 1 2026-01-07T17:46:42.736246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557937545801:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546438:2292], cookie# 1 2026-01-07T17:46:42.736273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557937545804:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557937546439:2292], cookie# 1 2026-01-07T17:46:42.736330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557937546437:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937545798:2050], cookie# 1 2026-01-07T17:46:42.736367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557937546438:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937545801:2053], cookie# 1 2026-01-07T17:46:42.736385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557937546439:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937545804:2056], cookie# 1 2026-01-07T17:46:42.736420Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:919: [main][1:7592677557937546378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937546434:2292], cookie# 1 2026-01-07T17:46:42.736462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677557937546378:2292][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:42.736480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677557937546378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937546435:2292], cookie# 1 2026-01-07T17:46:42.736502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677557937546378:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:42.736526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677557937546378:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557937546436:2292], cookie# 1 2026-01-07T17:46:42.736543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677557937546378:2292][/dc-1] Sync cookie mismatch: sender# [1:7592677557937546436:2292], cookie# 1, current cookie# 0 2026-01-07T17:46:42.736596Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557937546118:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:42.746881Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557937546118:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677557937546378:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:42.747159Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { Subscriber: [1:7592677557937546378:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:42.750104Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677557937546600:2439], recipient# [1:7592677557937546599:2438], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] 
ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:42.750209Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677557937546599:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:42.792571Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677557937546599:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:42.796118Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677557937546599:2438] Handle TEvDescribeSchemeResult Forward to# [1:7592677557937546598:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:46:42.798965Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677557937546110:2117] Handle TEvProposeTransaction 2026-01-07T17:46:42.798995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677557937546110:2117] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:46:42.799059Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677557937546110:2117] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677557937546605:2443] 2026-01-07T17:46:42.920494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677557937546605:2443] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 
2026-01-07T17:46:42.920552Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... meShards: there are 0 elements } 2026-01-07T17:46:45.250526Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545801:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449262:3026] 2026-01-07T17:46:45.250546Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545801:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449265:3027] 2026-01-07T17:46:45.250558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677570822449254:3028][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592677570822449269:3028] 2026-01-07T17:46:45.250571Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545801:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449271:3028] 2026-01-07T17:46:45.250571Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592677570822449254:3028][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7592677557937546118:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.250586Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545804:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449263:3026] 2026-01-07T17:46:45.250598Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545804:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449266:3027] 2026-01-07T17:46:45.250608Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-07T17:46:45.250609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557937545804:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570822449272:3028] 2026-01-07T17:46:45.250653Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677570822449253:3027] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:45.250711Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { Subscriber: [1:7592677570822449253:3027] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown 
Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:45.250733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:46:45.250775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677570822449252:3026] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:45.250805Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { Subscriber: [1:7592677570822449252:3026] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:45.250836Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:46:45.250862Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557937546118:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677570822449254:3028] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:45.250910Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { Subscriber: [1:7592677570822449254:3028] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:45.250940Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677570822449273:3029], recipient# [1:7592677570822449235:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:45.251024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677570822449274:3030], recipient# [1:7592677570822449247:2325], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.249701Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557937546118:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.249844Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { Subscriber: [1:7592677562232514302:2768] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:46.249947Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677575117416590:3034], recipient# [1:7592677575117416589:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.251678Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557937546118:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.251781Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557937546118:2121], cacheItem# { Subscriber: { 
Subscriber: [1:7592677570822449252:3026] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:46.251836Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677575117416592:3035], recipient# [1:7592677575117416591:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2026-01-07T17:46:42.487085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677557468962393:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:42.487141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:42.715118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:42.744244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:42.744359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:42.751859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:42.809833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:42.980253Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677557468962622:2133] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:42.980339Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677557468963071:2438] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:42.980495Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557468962633:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 
0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:42.980656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677557468962869:2293][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677557468962633:2137], cookie# 1 2026-01-07T17:46:42.989098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557468962923:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962920:2293], cookie# 1 2026-01-07T17:46:42.989166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557468962924:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962921:2293], cookie# 1 2026-01-07T17:46:42.989183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677557468962925:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962922:2293], cookie# 1 2026-01-07T17:46:42.989226Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557468962290:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962924:2293], cookie# 1 2026-01-07T17:46:42.989235Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557468962287:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962923:2293], cookie# 1 2026-01-07T17:46:42.989266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677557468962293:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677557468962925:2293], cookie# 1 2026-01-07T17:46:42.989307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557468962924:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962290:2053], cookie# 1 2026-01-07T17:46:42.989323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557468962923:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962287:2050], cookie# 1 2026-01-07T17:46:42.989334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677557468962925:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962293:2056], cookie# 1 2026-01-07T17:46:42.989406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677557468962869:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962921:2293], cookie# 1 2026-01-07T17:46:42.989435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677557468962869:2293][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:42.989456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677557468962869:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962920:2293], cookie# 1 2026-01-07T17:46:42.989474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7592677557468962869:2293][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:42.989525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677557468962869:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677557468962922:2293], cookie# 1 2026-01-07T17:46:42.989542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677557468962869:2293][/dc-1] Sync cookie mismatch: sender# [1:7592677557468962922:2293], cookie# 1, current cookie# 0 2026-01-07T17:46:42.989577Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557468962633:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:43.002329Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557468962633:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677557468962869:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:43.002484Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557468962633:2137], cacheItem# { Subscriber: { Subscriber: [1:7592677557468962869:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:43.002711Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557468962633:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:43.002768Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592677557468962633:2137], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-07T17:46:43.002826Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create 
subscriber: self# [1:7592677557468962633:2137], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-07T17:46:43.002870Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592677557468962633:2137], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-07T17:46:43.003043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592677561763930369:2440][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:46:43.003530Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677557468962287:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592677561763930375:2440] 2026-01-07T17:46:43.003555Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677557468962287:2050] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:46:43.003638Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677557468962287:2050] Subscribe: subscriber# [1:7592677561763930375:2440], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:43.003703Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677557468962290:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592677561763930376:2440] 2026-01-07T17:46:43.003718Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677557468962290:2053] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:46:43.003746Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677557468962290:2053] Subscribe: subscriber# [1:7592677561763930376:2440], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:43.003780Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677557468962293:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592677561763930377:2440] 2026-01-07T17:46:43.003794Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677557468962293:2056] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:46:43.003814Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677557468962293:2056] Subscribe: subscriber# [1:7592677561763930377:2440], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:43.003902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592677561763930375:2440][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7592677557468962287:2050] 2026-01-07T17:46:43. ... 
Version: 0 }: sender# [1:7592677557468962290:2053] 2026-01-07T17:46:45.569310Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592677570353865911:3139][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592677557468962633:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.569314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592677570353865925:3140][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592677557468962293:2056] 2026-01-07T17:46:45.569337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677570353865911:3139][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7592677570353865917:3139] 2026-01-07T17:46:45.569351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677570353865912:3140][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592677570353865915:3140] 2026-01-07T17:46:45.569361Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592677570353865911:3139][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7592677557468962633:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.569382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677570353865912:3140][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592677570353865918:3140] 2026-01-07T17:46:45.569388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557468962293:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570353865925:3140] 2026-01-07T17:46:45.569407Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592677570353865912:3140][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592677557468962633:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.569426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677570353865912:3140][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7592677570353865919:3140] 2026-01-07T17:46:45.569438Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557468962633:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-07T17:46:45.569446Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592677570353865912:3140][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7592677557468962633:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:45.569464Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557468962287:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570353865920:3139] 2026-01-07T17:46:45.569477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557468962287:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570353865921:3140] 2026-01-07T17:46:45.569491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557468962290:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570353865922:3139] 2026-01-07T17:46:45.569495Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557468962633:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677570353865911:3139] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:45.569505Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677557468962290:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677570353865923:3140] 2026-01-07T17:46:45.569578Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557468962633:2137], cacheItem# { Subscriber: { Subscriber: [1:7592677570353865911:3139] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:45.569608Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677557468962633:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:46:45.569647Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677557468962633:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677570353865912:3140] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:45.569699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557468962633:2137], cacheItem# { Subscriber: { Subscriber: [1:7592677570353865912:3140] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable 
RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:45.569796Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677570353865926:3142], recipient# [1:7592677570353865902:2329], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.493586Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557468962633:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.493675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557468962633:2137], cacheItem# { Subscriber: { Subscriber: [1:7592677561763930693:2671] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:46.493735Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677574648833239:3146], recipient# [1:7592677574648833238:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.569173Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677557468962633:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:46.569283Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677557468962633:2137], cacheItem# { Subscriber: { Subscriber: [1:7592677570353865901:3138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:46.569376Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677574648833244:3147], recipient# [1:7592677574648833243:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> THealthCheckTest::ShardsLimit800 [GOOD] >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TStorageTenantTest::Boot >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2026-01-07T17:46:44.143068Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677563159418702:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:44.143403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:44.370670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:44.402503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:44.402590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:44.413928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:44.488399Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:44.574012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:44.660123Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677563159418865:2138] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:44.660188Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677563159419340:2455] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:44.660308Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677563159418871:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:44.660411Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677563159419107:2295][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677563159418871:2140], cookie# 1 2026-01-07T17:46:44.662125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677563159419136:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419133:2295], cookie# 1 2026-01-07T17:46:44.662177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677563159419137:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419134:2295], cookie# 1 2026-01-07T17:46:44.662214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677563159419138:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419135:2295], cookie# 1 2026-01-07T17:46:44.662265Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677563159418521:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419136:2295], cookie# 1 2026-01-07T17:46:44.662338Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677563159418527:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419138:2295], cookie# 1 2026-01-07T17:46:44.662396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677563159419136:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159418521:2051], cookie# 1 2026-01-07T17:46:44.662410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677563159419138:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159418527:2057], cookie# 1 2026-01-07T17:46:44.662412Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677563159418524:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677563159419137:2295], cookie# 1 2026-01-07T17:46:44.662446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677563159419107:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159419133:2295], cookie# 1 
2026-01-07T17:46:44.662476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677563159419107:2295][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:44.662515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677563159419107:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159419135:2295], cookie# 1 2026-01-07T17:46:44.662544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677563159419107:2295][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:44.662620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677563159419137:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159418524:2054], cookie# 1 2026-01-07T17:46:44.662661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677563159419107:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677563159419134:2295], cookie# 1 2026-01-07T17:46:44.662677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677563159418871:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:44.662681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677563159419107:2295][/dc-1] Sync cookie mismatch: sender# [1:7592677563159419134:2295], cookie# 1, current cookie# 0 2026-01-07T17:46:44.668649Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677563159418871:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677563159419107:2295] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:44.668825Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677563159418871:2140], cacheItem# { Subscriber: { Subscriber: [1:7592677563159419107:2295] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:44.671506Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677563159419341:2456], recipient# [1:7592677563159419340:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { 
DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:44.671613Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677563159419340:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:44.700673Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677563159419340:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:44.704503Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677563159419340:2455] Handle TEvDescribeSchemeResult Forward to# [1:7592677563159419339:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:46:44.707143Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677563159418865:2138] Handle TEvProposeTransaction 2026-01-07T17:46:44.707212Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677563159418865:2138] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:46:44.707312Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677563159418865:2138] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677563159419346:2460] 2026-01-07T17:46:44.798319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677563159419346:2460] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" 
Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:46:44.798380Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:47.273818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677576044321820:2872][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7592677576044321823:2872], cookie# 2 2026-01-07T17:46:47.273835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677576044321820:2872][/dc-1/USER_0/SimpleTable] Sync cookie mismatch: sender# [1:7592677576044321823:2872], cookie# 2, current cookie# 0 2026-01-07T17:46:47.273879Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677563159418871:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2026-01-07T17:46:47.273932Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677563159418871:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677576044321820:2872] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767808007250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2026-01-07T17:46:47.274012Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677563159418871:2140], cacheItem# { Subscriber: { Subscriber: [1:7592677576044321820:2872] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767808007250 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-07T17:46:47.274136Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677576044321830:2876], recipient# [1:7592677576044321829:2875], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:47.274190Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677576044321829:2875] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:47.274254Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677576044321829:2875] SEND to# 
72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2026-01-07T17:46:47.275403Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677576044321829:2875] Handle TEvDescribeSchemeResult Forward to# [1:7592677576044321828:2874] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1767808007250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046644480 2026-01-07T17:46:47.322185Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7592677563159418521:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7592677567717304675:2103] 2026-01-07T17:46:47.322221Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7592677563159418521:2051] Unsubscribe: subscriber# [3:7592677567717304675:2103], path# /dc-1/USER_0 2026-01-07T17:46:47.322548Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:46:47.322595Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7592677563159418524:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7592677567717304676:2103] 2026-01-07T17:46:47.322609Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7592677563159418524:2054] Unsubscribe: subscriber# [3:7592677567717304676:2103], path# /dc-1/USER_0 2026-01-07T17:46:47.322798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7592677563159418527:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7592677567717304677:2103] 2026-01-07T17:46:47.322819Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7592677563159418527:2057] Unsubscribe: subscriber# [3:7592677567717304677:2103], path# /dc-1/USER_0 2026-01-07T17:46:47.323466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:46:47.920909Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677567717304787:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:47.921104Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592677567717304787:2109], cacheItem# { Subscriber: { Subscriber: [3:7592677572012272282:2226] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:47.921190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677576307239821:2371], recipient# [3:7592677576307239820:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> TStorageTenantTest::CreateTableInsideSubDomain >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> TStorageTenantTest::LsLs >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> THealthCheckTest::ShardsNoLimit [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:02.444996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:02.571623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:02.580837Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:02.582476Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:02.582943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:02.583163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:02.585493Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:02.585886Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:02.586074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:02.586246Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:03.103250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:03.219893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.220071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.220890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.221023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.269299Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:03.270197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.270607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.359014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:03.480876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:04.405489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:04.405560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:04.405592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:04.405948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:11.167847Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:11.168603Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:11.170473Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:11.177139Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:11.178143Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:11.179289Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:11.179625Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:11.179766Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:11.181071Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:11.181107Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:11.459213Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:11.549517Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:11.549639Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:11.550317Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:11.550390Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:11.595476Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:11.596209Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:11.596710Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:11.664526Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:11.677765Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:12.504131Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:12.504210Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:12.504245Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:12.504672Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:19.812249Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:19.813322Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:19.828154Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:19.831431Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:19.831719Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:19.832197Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:19.832426Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:19.834229Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:19.834492Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:19.834624Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:20.149992Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:20.252531Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:20.252669Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:20.253187Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState ... 026-01-07T17:46:26.102509Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:26.102745Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:26.102855Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:26.419767Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:26.505934Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:26.506077Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:26.540150Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:26.597232Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:27.419677Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:27.419734Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:27.419764Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:27.420151Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:31.865426Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:31.872302Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:31.875478Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:31.875823Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:31.875995Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:32.199937Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:32.200109Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:32.208035Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1767807988475034 != 1767807988475037 2026-01-07T17:46:32.214668Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:32.266328Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:32.344252Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:32.856385Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.856464Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.856504Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.914425Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:37.610658Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:37.620554Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:37.621084Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:37.621264Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:37.989723Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:38.088509Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:38.088680Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:38.135776Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:38.206993Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:39.176780Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:39.176863Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:39.176914Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:39.177444Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:47.059524Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:47.060525Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:47.075170Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:47.078408Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:47.078958Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:303:2227], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:47.079373Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:47.079572Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:47.081430Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:689:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:47.081509Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:47.081547Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:47.386582Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:47.486441Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:47.486639Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:47.487618Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:47.487706Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:47.532388Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-07T17:46:47.533376Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:47.533753Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:47.591742Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:47.605462Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:48.614332Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:48.614417Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:48.614479Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:48.615063Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:42:09.065729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:09.173047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:09.181013Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:09.181424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:09.181580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:09.566649Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:09.670272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:09.670459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:09.706258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:09.779923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:10.442035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:10.443157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:10.443201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:10.443245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:10.443413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:10.509477Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:11.072966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:14.302864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:14.309897Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:14.313572Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:14.350397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:14.350550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:14.390284Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:14.392177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:14.590155Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:14.590274Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:14.591994Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.592750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.593527Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.594583Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.594694Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.594980Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.595114Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.595257Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.595486Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:14.613689Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:14.848099Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:14.872974Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:14.873101Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:14.905396Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:14.906457Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:14.906663Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:14.906711Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:14.906760Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:14.906811Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:14.906847Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:14.906887Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:14.907387Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:14.909338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:14.911571Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:14.920778Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:14.920834Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:14.920908Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:14.926988Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:14.927105Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:14.952743Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:14.953218Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:14.956313Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:14.994849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:15.010067Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:15.010216Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:15.023453Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:15.208286Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:15.249756Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:15.625121Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:15.734871Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:15.734944Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:16.469517Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... atistics::TEvSaveStatisticsQueryResponse, TBasicString> const&, NKikimr::TPathId co... 2026-01-07T17:44:41.417839Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4451:3984], ActorId: [2:4454:3986], Got response [2:4457:3988] SUCCESS 2026-01-07T17:44:41.418173Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:44:41.419071Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3150:3428] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-07T17:44:41.419142Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-07T17:44:41.419184Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-07T17:44:41.445353Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:44:41.445434Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3150:3428] 2026-01-07T17:44:41.445496Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-07T17:44:41.445772Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:44:41.446701Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:44:41.451308Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:44:41.451484Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], Start read next stream part 2026-01-07T17:44:41.511002Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4500:4021]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:44:41.511398Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:44:41.511471Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4500:4021], StatRequests.size() = 1 2026-01-07T17:45:54.030371Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrz47b50e6x0mv8t58y52p", SessionId: ydb://session/3?node_id=2&id=YWE3OWE3ZDAtZTM4ZjJhYmEtYzdhNTliNzgtZDhmOTE2MTQ=, Slow query, duration: 72.574594s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:45:54.031739Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:45:54.031993Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], Start read next stream part 2026-01-07T17:45:54.032472Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-07T17:45:54.032618Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:45:54.034822Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:45:54.034886Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4489:4011], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmRjMDI2YjEtNTA3MjNmM2MtZGU4MDgwNTktNDNhYjc4MWU=, TxId: 2026-01-07T17:45:54.035822Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:45:54.035938Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], Start read next stream part 2026-01-07T17:45:54.103045Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4610:4120]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:45:54.103393Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-07T17:45:54.103444Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4610:4120], StatRequests.size() = 1 2026-01-07T17:46:45.044726Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs1b3k4kfpfdargctdmrzj", SessionId: ydb://session/3?node_id=2&id=NDhiZjE2MmUtYzRkMzQ1ZGMtNDE0NDgyNjQtYTUwYmVhMmU=, Slow query, duration: 51.004701s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:46:45.047172Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:46:45.047313Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], Start read next stream part 2026-01-07T17:46:45.047507Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-07T17:46:45.048293Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:46:45.048352Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4487:2463], ActorId: [2:4592:4105], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=MjljYWQzODctNGMxMGFlZDItNWZkODRhZTItZWM5ZTFlNjA=, TxId: 2026-01-07T17:46:45.049376Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4702:4205], ActorId: [2:4704:4207], Starting query actor #1 [2:4705:4208] 2026-01-07T17:46:45.049451Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4704:4207], ActorId: [2:4705:4208], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:46:45.053741Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4704:4207], ActorId: [2:4705:4208], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZGExMjllMjAtZTkxOTFiOGYtNzZkZjhhYjktZTA0ZjZhNjQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:46:45.098133Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4704:4207], ActorId: [2:4705:4208], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGExMjllMjAtZTkxOTFiOGYtNzZkZjhhYjktZTA0ZjZhNjQ=, TxId: 2026-01-07T17:46:45.098232Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4704:4207], ActorId: [2:4705:4208], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGExMjllMjAtZTkxOTFiOGYtNzZkZjhhYjktZTA0ZjZhNjQ=, TxId: 2026-01-07T17:46:45.098712Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4702:4205], ActorId: [2:4704:4207], Got response [2:4705:4208] SUCCESS 2026-01-07T17:46:45.099266Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:46:45.139549Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:46:45.139623Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3150:3428] 2026-01-07T17:46:45.196281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:46:45.196378Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:46:45.242952Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4228:3779], schemeshard count = 1 2026-01-07T17:46:48.810700Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:46:48.811020Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-07T17:46:48.811172Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-07T17:46:48.833305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:46:48.833416Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:46:48.833760Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:46:48.847985Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:117:2057] recipient: [1:111:2141] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:117:2057] recipient: [1:111:2141] Leader for TabletID 9437184 is [1:128:2152] sender: [1:130:2057] recipient: [1:111:2141] 2026-01-07T17:45:46.066034Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.072700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.073154Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2152] 2026-01-07T17:45:46.073471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.122688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.425634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.425712Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.434691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.435001Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.436710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.436788Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.436863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.437233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.437982Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.438074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:197:2152] in generation 2 Leader for TabletID 9437184 is [1:128:2152] sender: [1:217:2057] recipient: [1:14:2061] 2026-01-07T17:45:46.526095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.566155Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.566328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.566418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2026-01-07T17:45:46.566455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.566486Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.566528Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.566740Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.566789Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.567068Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.567145Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.567190Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.567238Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.567284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.567320Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.567351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.567381Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.567417Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.567506Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received 
event# 269877761, Sender [1:218:2216], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.567545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.567583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:216:2215], serverId# [1:218:2216], sessionId# [0:0:0] 2026-01-07T17:45:46.577884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:128:2152]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.577965Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.578087Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.578249Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.578304Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.578357Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.578397Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.578438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.578477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.578507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.578844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.578886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.578936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.578982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.579030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.579054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.579084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.579111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.579137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to 
execute on unit WaitForPlan 2026-01-07T17:45:46.593442Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.593522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.593570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.593621Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.593684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.594174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.594243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.594286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2026-01-07T17:45:46.594433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:128:2152]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.594482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.594600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.594646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-07T17:45:46.594694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.594731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.599992Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.600081Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.600325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.600369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.600429Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.600469Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.600520Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... 
HARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-07T17:46:51.209760Z node 40 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:46:51.209783Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:46:51.209808Z node 40 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [7:6] in PlanQueue unit at 9437186 2026-01-07T17:46:51.209831Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PlanQueue 2026-01-07T17:46:51.209859Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.209886Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2026-01-07T17:46:51.209909Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadTxDetails 2026-01-07T17:46:51.209933Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2026-01-07T17:46:51.210457Z node 40 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2026-01-07T17:46:51.210498Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.210523Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2026-01-07T17:46:51.210547Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2026-01-07T17:46:51.210571Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2026-01-07T17:46:51.210607Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.210628Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2026-01-07T17:46:51.210649Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2026-01-07T17:46:51.210671Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2026-01-07T17:46:51.210706Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [7:6] is the new logically complete end at 9437186 2026-01-07T17:46:51.210738Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [7:6] is the new logically incomplete end at 9437186 2026-01-07T17:46:51.210766Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [7:6] at 9437186 2026-01-07T17:46:51.210799Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.210820Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2026-01-07T17:46:51.210842Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2026-01-07T17:46:51.210864Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] 
at 9437186 on unit BuildDataTxOutRS 2026-01-07T17:46:51.210904Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.210926Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2026-01-07T17:46:51.210948Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2026-01-07T17:46:51.210970Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2026-01-07T17:46:51.210993Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.211014Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2026-01-07T17:46:51.211035Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2026-01-07T17:46:51.211057Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2026-01-07T17:46:51.211085Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.211107Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2026-01-07T17:46:51.211127Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2026-01-07T17:46:51.211150Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2026-01-07T17:46:51.211170Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.211191Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2026-01-07T17:46:51.211213Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BlockFailPoint 2026-01-07T17:46:51.211236Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BlockFailPoint 2026-01-07T17:46:51.211258Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.211279Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BlockFailPoint 2026-01-07T17:46:51.211300Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2026-01-07T17:46:51.211322Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2026-01-07T17:46:51.211576Z node 40 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2026-01-07T17:46:51.211622Z node 40 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:46:51.211664Z node 40 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.211688Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2026-01-07T17:46:51.211711Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompleteOperation 2026-01-07T17:46:51.211736Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2026-01-07T17:46:51.211874Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is DelayComplete 2026-01-07T17:46:51.211899Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2026-01-07T17:46:51.211926Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompletedOperations 2026-01-07T17:46:51.211951Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2026-01-07T17:46:51.211980Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-07T17:46:51.212003Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2026-01-07T17:46:51.212026Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [7:6] at 9437186 has finished 2026-01-07T17:46:51.212053Z node 40 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:46:51.212083Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:46:51.212109Z node 40 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-07T17:46:51.212135Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-07T17:46:51.225048Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2026-01-07T17:46:51.225111Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2026-01-07T17:46:51.225166Z node 40 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:46:51.225205Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2026-01-07T17:46:51.225260Z node 40 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [40:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:46:51.225309Z node 40 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:46:51.225646Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2026-01-07T17:46:51.225687Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2026-01-07T17:46:51.225731Z node 40 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-07T17:46:51.225760Z node 40 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2026-01-07T17:46:51.225807Z node 40 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [40:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:46:51.225846Z node 40 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-07T17:46:51.226071Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2026-01-07T17:46:51.226107Z node 40 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2026-01-07T17:46:51.226146Z node 40 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:46:51.226174Z node 40 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2026-01-07T17:46:51.226217Z node 40 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [40:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:46:51.226256Z node 40 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> YdbProxy::ListDirectory >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> THealthCheckTest::TestStateStorageYellow >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:01.353127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:01.430522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:01.437582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:01.437844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:01.438006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:01.910265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:02.037325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:02.037481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:02.094889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:02.248104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:03.166905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:03.166969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:03.167017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:03.167482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:07.330609Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:07.339951Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:07.355201Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:529:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:07.355667Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:07.355804Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:07.743781Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:07.874103Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:07.874241Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:07.936393Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:08.034236Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:08.880588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:08.880642Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:08.880678Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:08.881602Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:12.870202Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.874368Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.876336Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.876445Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.876572Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:13.213861Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:13.303064Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:13.303204Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:13.351200Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:13.429986Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:14.307678Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.307729Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.307764Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.308212Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:21.829594Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.829763Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.840844Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.841499Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.841837Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.842176Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:21.844601Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:681:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.844822Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.844991Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.203914Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.302660Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.302787Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.303597Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.303669Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.347762Z node 8 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2026-01-07T17:46:22.348797Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.349206Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.424678Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:22.438196Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:23.366489Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:23.366579Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:23.366634Z node 8 :NET_CLASSIFI ... helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:676:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:31.217357Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:31.217461Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:31.545946Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:31.647791Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.647920Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.648659Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.648744Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.693284Z node 10 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2026-01-07T17:46:31.694110Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.694470Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.751590Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:31.776831Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:32.566771Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.566824Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.566849Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.567011Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:39.473632Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:39.474568Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:39.487834Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:39.490771Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:39.491207Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:303:2227], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:39.491524Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:39.491699Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:39.493587Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:689:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:39.493677Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:39.493717Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:39.761240Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:39.847311Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:39.847430Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:39.848125Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:39.848205Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:39.893116Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-07T17:46:39.893880Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:39.894202Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:39.951294Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:39.976276Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:40.803368Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:40.803421Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:40.803450Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:40.803790Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:48.930742Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:48.931771Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:48.932276Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:48.943839Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:48.945413Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:48.945689Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:48.945838Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:48.947376Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:48.947503Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:49.233249Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:49.352881Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:49.353030Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:49.353685Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:49.353768Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:49.398636Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-07T17:46:49.399481Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:49.399803Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:49.460613Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:49.510601Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:50.282181Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:50.282223Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:50.282246Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:50.282474Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2026-01-07T17:46:37.278265Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677535834071203:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:37.278350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:37.309649Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677536980351555:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:37.309724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:37.569083Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:37.570138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:37.602740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:37.602867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:37.604436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:37.604547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:37.619467Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:37.619665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:37.621761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:46:37.695784Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:37.754934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:37.766024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:37.905081Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677535834071242:2144] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:37.905152Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677535834071712:2457] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:37.905257Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677535834071278:2160], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:37.905357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677535834071480:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677535834071278:2160], cookie# 1 2026-01-07T17:46:37.906817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677535834071540:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071537:2299], cookie# 1 2026-01-07T17:46:37.906862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677535834071541:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071538:2299], cookie# 1 2026-01-07T17:46:37.906877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677535834071542:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071539:2299], cookie# 1 2026-01-07T17:46:37.906913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677535834070889:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071540:2299], cookie# 1 2026-01-07T17:46:37.906940Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677535834070892:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071541:2299], cookie# 1 2026-01-07T17:46:37.906956Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677535834070895:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677535834071542:2299], cookie# 1 2026-01-07T17:46:37.907003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677535834071540:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677535834070889:2052], cookie# 1 2026-01-07T17:46:37.907023Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677535834071541:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } 
}: sender# [1:7592677535834070892:2055], cookie# 1 2026-01-07T17:46:37.907035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677535834071542:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677535834070895:2058], cookie# 1 2026-01-07T17:46:37.907066Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677535834071480:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677535834071537:2299], cookie# 1 2026-01-07T17:46:37.907083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677535834071480:2299][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:37.907116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677535834071480:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677535834071538:2299], cookie# 1 2026-01-07T17:46:37.907137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677535834071480:2299][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:37.907158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677535834071480:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677535834071539:2299], cookie# 1 2026-01-07T17:46:37.907168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677535834071480:2299][/dc-1] Sync cookie mismatch: sender# [1:7592677535834071539:2299], cookie# 1, current cookie# 0 2026-01-07T17:46:37.907216Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677535834071278:2160], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:37.911734Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677535834071278:2160], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677535834071480:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:37.911866Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677535834071278:2160], cacheItem# { Subscriber: { Subscriber: [1:7592677535834071480:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:37.913903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# 
[1:7592677535834071713:2458], recipient# [1:7592677535834071712:2457], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:37.913988Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677535834071712:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:37.935164Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677535834071712:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:37.937980Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677535834071712:2457] Handle TEvDescribeSchemeResult Forward to# [1:7592677535834071711:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecuritySt ... 
: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:50.629525Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677590616730151:2252], recipient# [3:7592677590616730112:2554], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:50.629832Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7592677590616730112:2554], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.702432Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592677590616730125:2249] 2026-01-07T17:46:50.702477Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7592677590616730119:2248] 2026-01-07T17:46:50.702497Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.702516Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592677590616730126:2249] 2026-01-07T17:46:50.702529Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.702532Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.702543Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7592677590616730127:2249] 2026-01-07T17:46:50.702550Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7592677590616730120:2248] 2026-01-07T17:46:50.702565Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677590616730117:2249][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.702576Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: 
subscriber.cpp:867: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.702595Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7592677590616730121:2248] 2026-01-07T17:46:50.702614Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677590616730116:2248][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7592677577731827892:2205], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:50.750380Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677577731827892:2205], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:50.750511Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592677577731827892:2205], cacheItem# { Subscriber: { Subscriber: [3:7592677582026795437:2238] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:50.750604Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677590616730159:2253], recipient# [3:7592677590616730158:2559], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:50.750766Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:51.314868Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677536980351594:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.315028Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677536980351594:2109], cacheItem# { Subscriber: { Subscriber: [2:7592677541275319105:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:51.315127Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677597109894070:2156], recipient# [2:7592677597109894069:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.327190Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677536980351594:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.327325Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677536980351594:2109], cacheItem# { Subscriber: { Subscriber: [2:7592677541275319105:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:51.327436Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677597109894072:2157], recipient# [2:7592677597109894071:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> YdbProxy::CreateTable >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] Test command err: 2026-01-07T17:45:58.117313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677366691845553:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:58.117585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:58.385103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:58.436551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:58.436655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:45:58.446986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:58.520785Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:58.542864Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677366691845513:2081] 1767807958115511 != 1767807958115514 2026-01-07T17:45:58.557617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:58.594202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:58.594223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:58.594235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:58.594325Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:58.785886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:01.585713Z node 
2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677379813469451:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:01.586099Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:01.634753Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:01.739726Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677379813469409:2081] 1767807961578441 != 1767807961578444 2026-01-07T17:46:01.758278Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:01.772124Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:01.772225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:01.775666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:01.912011Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:01.925592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:01.925619Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:01.925627Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:01.925699Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:02.257652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:02.264485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:12.328579Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.328962Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.340217Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.341861Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.344074Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:616:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.344570Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.344799Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.347466Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:611:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.347830Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.347914Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.724505Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:12.834639Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.834823Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.835598Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.835682Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.884069Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:12.885077Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.885470Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.981606Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:13.006004Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:13.916480Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:13.916555Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:13.916593Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:13.916971Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:21.676417Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.677223Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.688467Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.690639Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.690852Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.691298Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:21.691436Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.692746Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.692906Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot ... vice] Failed to discover tenant nodes 2026-01-07T17:46:40.120128Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:40.120546Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:40.120635Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:40.122243Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:40.122300Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:40.477226Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:40.572257Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:40.572390Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:40.573045Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:40.573121Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:40.620921Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-07T17:46:40.621939Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:40.622395Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:40.692571Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:40.706988Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:41.631269Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:41.631343Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:41.631385Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:41.631874Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status: ORANGE message: "Group dead in some piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" type: "STORAGE_GROUP" level: 4 } issue_log { id: "ORANGE-cf29-1231c6b1-f7549920" status: ORANGE message: "Pool has no redundancy" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "ORANGE-a3e2-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "ORANGE-5cc9-1231c6b1" status: ORANGE message: "Database has storage issues" location { database { name: "/Root" } } reason: "ORANGE-af30-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:50.095709Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:50.096846Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:50.114940Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:695:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.115743Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:50.116334Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.116445Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:50.116898Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:50.117242Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:50.485569Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:50.589601Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.589770Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.590372Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.590460Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.664096Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-07T17:46:50.664954Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:50.665474Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:50.763754Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:50.779396Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:51.788017Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:51.788088Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:51.788160Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:51.788658Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-6100-1231c6b1-2147483648" status: RED message: "Group dead in all piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" reason: "RED-1a83-1231c6b1-2147483650" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-40f1-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-6100-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483650" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483650" pile { name: "2" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483650-3-58-0-58" id: "2147483650-3-59-0-59" id: "2147483650-3-60-0-60" } pile { name: "2" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-5cc9-1231c6b1" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" type: "DATABASE" level: 1 } location { id: 11 host: "::1" port: 12001 pile { name: "pile0" } } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest Test command err: Process stderr: >> YdbProxy::DescribePath >> YdbProxy::CreateTopic >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 |92.6%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest Test 
command err: Process stderr: |92.6%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> TStorageTenantTest::LsLs [GOOD] >> YdbProxy::CopyTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-07T17:46:50.981235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677588744942289:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:50.981612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:51.225013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:51.254436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:51.254510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:51.271305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:51.321068Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:51.472662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677588744942178:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7592677593039910283:2445] 2026-01-07T17:46:51.472706Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677588744942178:2051] Upsert description: path# /dc-1/.metadata/result_sets 2026-01-07T17:46:51.472776Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677588744942178:2051] Subscribe: subscriber# [1:7592677593039910283:2445], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:51.472831Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677588744942184:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# 
[1:7592677593039910285:2445] 2026-01-07T17:46:51.472839Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677588744942184:2057] Upsert description: path# /dc-1/.metadata/result_sets 2026-01-07T17:46:51.472868Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677588744942184:2057] Subscribe: subscriber# [1:7592677593039910285:2445], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:51.472946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592677593039910283:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677588744942178:2051] 2026-01-07T17:46:51.472969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592677593039910285:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677588744942184:2057] 2026-01-07T17:46:51.473030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677588744942181:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7592677593039910284:2445] 2026-01-07T17:46:51.473067Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677588744942181:2054] Upsert description: path# /dc-1/.metadata/result_sets 2026-01-07T17:46:51.473089Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677588744942181:2054] Subscribe: subscriber# [1:7592677593039910284:2445], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:51.473130Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677588744942178:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677593039910283:2445] 2026-01-07T17:46:51.473147Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677588744942184:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677593039910285:2445] 2026-01-07T17:46:51.473505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677593039910267:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677593039910280:2445] 2026-01-07T17:46:51.473569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677593039910267:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677593039910282:2445] 2026-01-07T17:46:51.473608Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592677593039910267:2445][/dc-1/.metadata/result_sets] Set up state: owner# [1:7592677593039909832:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:51.473642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7592677593039910284:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677588744942181:2054] 2026-01-07T17:46:51.473669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7592677593039910267:2445][/dc-1/.metadata/result_sets] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/result_sets Version: 0 }: sender# [1:7592677593039910281:2445] 2026-01-07T17:46:51.473700Z node 1 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7592677593039910267:2445][/dc-1/.metadata/result_sets] Ignore empty state: owner# [1:7592677593039909832:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:51.473772Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677593039909832:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/result_sets PathId: Strong: 1 } 2026-01-07T17:46:51.473808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7592677588744942181:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7592677593039910284:2445] 2026-01-07T17:46:51.474024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677593039909832:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/result_sets PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7592677593039910267:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:51.486681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677593039909832:2146], cacheItem# { Subscriber: { Subscriber: [1:7592677593039910267:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:51.487091Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677593039910286:2446], recipient# [1:7592677593039909817:2270], result# { ErrorCount: 3 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.487210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:51.488302Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677593039909818:2138] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:51.488375Z node 1 :TX_PROXY DEBUG: describe.cpp:270: 
Actor# [1:7592677593039910288:2448] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:51.488554Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677593039909832:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.488637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677593039910062:2295][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677593039909832:2146], cookie# 1 2026-01-07T17:46:51.488702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593039910106:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593039910103:2295], cookie# 1 2026-01-07T17:46:51.488722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593039910107:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593039910104:2295], cookie# 1 2026-01-07T17:46:51.488744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593039910108:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593039910105:2295], cookie# 1 2026-01-07T17:46:51.488778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677588744942181:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593039910107:2295], cookie# 1 2026-01-07T17:46:51.488810Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677588744942184:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593039910108:2295], cookie# 1 2026-01-07T17:46:51.488865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677593039910107:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588744942181:2054], cookie# 1 2026-01-07T17:46:51.488903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677593039910108:2295][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588744942184:2057], cookie# 1 2026-01-07T17:46:51.488940Z node 1 :SCHEME_BOARD_REPLICA DEBUG: repli ... 
Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534427:2358] 2026-01-07T17:46:53.317164Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.397762Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534425:2358] 2026-01-07T17:46:53.397850Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.397899Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534426:2358] 2026-01-07T17:46:53.397956Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.397975Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534427:2358] 2026-01-07T17:46:53.397992Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.558542Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534425:2358] 2026-01-07T17:46:53.558641Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.558698Z node 3 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534426:2358] 2026-01-07T17:46:53.558718Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.558734Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534427:2358] 2026-01-07T17:46:53.558762Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.879582Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534425:2358] 2026-01-07T17:46:53.879685Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.879707Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534426:2358] 2026-01-07T17:46:53.879733Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:53.879749Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534427:2358] 2026-01-07T17:46:53.879765Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other 
state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:54.245227Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677601371566711:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:54.245348Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592677601371566711:2109], cacheItem# { Subscriber: { Subscriber: [3:7592677605666534424:2358] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:54.245442Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677609961501747:2360], recipient# [3:7592677609961501746:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:54.245708Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:54.520371Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534425:2358] 2026-01-07T17:46:54.520453Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:54.520478Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534426:2358] 2026-01-07T17:46:54.520490Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:54.520534Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7592677605666534427:2358] 2026-01-07T17:46:54.520567Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7592677605666534424:2358][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7592677601371566711:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> TSchemeShardTest::MkRmDir >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2026-01-07T17:46:51.618345Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677593267382712:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:51.619252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:51.648222Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677595320782690:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:51.648277Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:51.834859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:51.839016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:51.862965Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:51.863031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:51.863808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:51.863881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:51.869708Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:51.869892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:51.870564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:51.940347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:52.001605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:52.045499Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:46:52.136205Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677593267382915:2144] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:52.136267Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677597562350682:2457] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:52.136363Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677593267382921:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:52.136479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677593267383168:2305][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677593267382921:2146], cookie# 1 2026-01-07T17:46:52.138108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593267383211:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383208:2305], cookie# 1 2026-01-07T17:46:52.138178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593267383212:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383209:2305], cookie# 1 2026-01-07T17:46:52.138194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677593267383213:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383210:2305], cookie# 1 2026-01-07T17:46:52.138222Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677593267382566:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383212:2305], cookie# 1 
2026-01-07T17:46:52.138246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677593267382563:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383211:2305], cookie# 1 2026-01-07T17:46:52.138263Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677593267382569:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677593267383213:2305], cookie# 1 2026-01-07T17:46:52.138313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677593267383212:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267382566:2055], cookie# 1 2026-01-07T17:46:52.138344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677593267383211:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267382563:2052], cookie# 1 2026-01-07T17:46:52.138362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677593267383213:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267382569:2058], cookie# 1 2026-01-07T17:46:52.138405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677593267383168:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267383209:2305], cookie# 1 2026-01-07T17:46:52.138426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677593267383168:2305][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:52.138443Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677593267383168:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267383208:2305], cookie# 1 2026-01-07T17:46:52.138465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677593267383168:2305][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:52.138507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677593267383168:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677593267383210:2305], cookie# 1 2026-01-07T17:46:52.138523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677593267383168:2305][/dc-1] Sync cookie mismatch: sender# [1:7592677593267383210:2305], cookie# 1, current cookie# 0 2026-01-07T17:46:52.138581Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677593267382921:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:52.143337Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677593267382921:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677593267383168:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 
IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:52.143485Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677593267382921:2146], cacheItem# { Subscriber: { Subscriber: [1:7592677593267383168:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:52.146284Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677597562350683:2458], recipient# [1:7592677597562350682:2457], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:52.146390Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677597562350682:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:52.176378Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677597562350682:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:52.180159Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677597562350682:2457] Handle TEvDescribeSchemeResult Forward to# [1:7592677597562350681:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecuritySt ... 
self# [2:7592677595320782862:2108], cacheItem# { Subscriber: { Subscriber: [2:7592677608205684883:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:55.161369Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677595320782862:2108], cacheItem# { Subscriber: { Subscriber: [2:7592677608205684884:2126] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:55.161485Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677612500652241:2130], recipient# [2:7592677608205684879:2298], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:55.162252Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7592677608205684879:2298], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:55.223670Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7592677599615750234:2119] 2026-01-07T17:46:55.223718Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.223733Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7592677599615750235:2119] 2026-01-07T17:46:55.223745Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.223760Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7592677599615750236:2119] 2026-01-07T17:46:55.223772Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677599615750233:2119][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.306844Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7592677608205684888:2126] 2026-01-07T17:46:55.306927Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.307028Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7592677608205684889:2126] 2026-01-07T17:46:55.307073Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 
Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.307139Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7592677608205684890:2126] 2026-01-07T17:46:55.307162Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684884:2126][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.307820Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7592677608205684885:2125] 2026-01-07T17:46:55.307886Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.308214Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7592677608205684886:2125] 2026-01-07T17:46:55.308261Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.308645Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7592677608205684887:2125] 2026-01-07T17:46:55.308685Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684883:2125][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.311221Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7592677608205684873:2123] 2026-01-07T17:46:55.311278Z node 2 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.311313Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7592677608205684874:2123] 2026-01-07T17:46:55.311356Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:55.311392Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7592677608205684875:2123] 2026-01-07T17:46:55.311416Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7592677608205684872:2123][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7592677595320782862:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> YdbProxy::ReadTopic >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> YdbProxy::AlterTable [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] >> THealthCheckTest::LayoutCorrect [GOOD] >> TSchemeShardTest::CreateSystemColumn [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> YdbProxy::DropTopic [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true >> 
TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2026-01-07T17:46:54.625962Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677606437986032:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:54.626025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:54.834784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:54.834885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:54.872331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:54.884922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:54.887259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677606437985990:2081] 1767808014624521 != 1767808014624524 2026-01-07T17:46:54.893985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:55.078505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:55.147154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:55.147184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:55.147194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:55.147287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:55.411268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:55.629361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:57.509078Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677619322888762:2498] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-07T17:46:57.527453Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:57.657487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:46:57.682752Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677619322888879:2579] txid# 281474976715661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2026-01-07T17:46:50.547078Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677588576997501:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:50.547632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:50.809598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:50.834421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.834516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.842834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:50.901258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:50.902263Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677588576997358:2081] 1767808010540427 != 1767808010540430 2026-01-07T17:46:51.079413Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677588576997635:2116] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:51.079481Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677592871965406:2442] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:51.079619Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677588576997663:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.079762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677588576997898:2289][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677588576997663:2131], cookie# 1 2026-01-07T17:46:51.081565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677588576997929:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997926:2289], cookie# 1 2026-01-07T17:46:51.081613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677588576997930:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997927:2289], cookie# 1 2026-01-07T17:46:51.081628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677588576997931:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997928:2289], cookie# 1 2026-01-07T17:46:51.081690Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677588576997326:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997929:2289], cookie# 1 2026-01-07T17:46:51.081724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677588576997329:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997930:2289], cookie# 1 2026-01-07T17:46:51.081742Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677588576997332:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677588576997931:2289], cookie# 1 2026-01-07T17:46:51.081813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677588576997929:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588576997326:2049], cookie# 1 2026-01-07T17:46:51.081834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677588576997930:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588576997329:2052], cookie# 1 2026-01-07T17:46:51.081847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677588576997931:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588576997332:2055], cookie# 1 2026-01-07T17:46:51.081887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677588576997898:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588576997926:2289], cookie# 1 2026-01-07T17:46:51.081916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677588576997898:2289][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:51.081951Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677588576997898:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677588576997927:2289], cookie# 1 2026-01-07T17:46:51.081994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677588576997898:2289][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:51.082041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677588576997898:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster 
State: { } }: sender# [1:7592677588576997928:2289], cookie# 1 2026-01-07T17:46:51.082058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677588576997898:2289][/dc-1] Sync cookie mismatch: sender# [1:7592677588576997928:2289], cookie# 1, current cookie# 0 2026-01-07T17:46:51.082121Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677588576997663:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:51.089711Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677588576997663:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677588576997898:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:51.089840Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677588576997663:2131], cacheItem# { Subscriber: { Subscriber: [1:7592677588576997898:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:51.093010Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677592871965407:2443], recipient# [1:7592677592871965406:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:51.093091Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677592871965406:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:51.102885Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677588576997663:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:51.102968Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592677588576997663:2131], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-07T17:46:51.103065Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592677588576997663:2131], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-07T17:46:51.103094Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592677588576997663:2131], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-07T17:46:51.103303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592677592871965408:2444][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:46:51.103894Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677588576997326:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592677592871965414:2444] 2026-01-07T17:46:51.103929Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677588576997326:2049] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:46:51.104009Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677588576997326:2049] Subscribe: subscriber# [1:7592677592871965414:2444], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:46:51.104092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592677588576997329:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592677592871965415:2444] 2026-01-07T17:46:51.104099Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592677588576997329:2052] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:46:51.104119Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592677588576997329:2052] Subscribe: subscriber# [1:7592677592871965415 ... 
data/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.417784Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592677621951149446:2770] 2026-01-07T17:46:57.417808Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.499178Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7592677609066246501:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:57.499303Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592677609066246501:2109], cacheItem# { Subscriber: { Subscriber: [4:7592677621951149435:2770] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:57.499348Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7592677609066246501:2109], cacheItem# { Subscriber: { Subscriber: [4:7592677621951149436:2771] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:46:57.499470Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7592677621951149497:2777], recipient# [4:7592677621951149430:2361], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:57.499639Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7592677621951149430:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:57.576606Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592677621951149438:2771] 2026-01-07T17:46:57.576672Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7592677621951149450:2772] 2026-01-07T17:46:57.576676Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.576696Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592677621951149439:2771] 2026-01-07T17:46:57.576715Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.576716Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.576732Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7592677621951149440:2771] 2026-01-07T17:46:57.576750Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149436:2771][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.578119Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# 
[4:7592677621951149451:2772] 2026-01-07T17:46:57.578175Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.578195Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7592677621951149452:2772] 2026-01-07T17:46:57.578217Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149437:2772][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.578345Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592677621951149444:2770] 2026-01-07T17:46:57.578389Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.578409Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592677621951149445:2770] 2026-01-07T17:46:57.578454Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:46:57.578481Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7592677621951149446:2770] 2026-01-07T17:46:57.578508Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7592677621951149435:2770][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7592677609066246501:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:27.178799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:27.178903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.178940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:27.178975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:27.179009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:27.179047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:27.179113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.179191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:27.179996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:27.180276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:27.265284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:27.265344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.280042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:27.280342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:27.280530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:27.286750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:27.287105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:27.287808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:27.288068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:27.290827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.291065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:27.292214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:27.292281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.292411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:27.292473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:27.292510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:27.292712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.299118Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:46:27.438044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:46:27.438317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.438509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:46:27.438570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:46:27.438812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:46:27.438889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:27.441335Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:27.441515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:46:27.441729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.441797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:46:27.441845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:46:27.441876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:46:27.443921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.443976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:46:27.444012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:46:27.445727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.445774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.445854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:27.445913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:46:27.449482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:46:27.451356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:46:27.451543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:46:27.452619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-01-07T17:46:27.452776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:46:27.452827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:27.453103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:46:27.453149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:27.453310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:46:27.453385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:27.455367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:27.455412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... rget path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:46:58.386618Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:46:58.386687Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCopyTable target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:46:58.386766Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:46:58.386946Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:46:58.387198Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:46:58.387853Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:46:58.387944Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:46:58.390709Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-01-07T17:46:58.391105Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: 
/MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2026-01-07T17:46:58.391488Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:58.391564Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:58.391857Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:46:58.392002Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:58.392082Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-07T17:46:58.392161Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:46:58.392263Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:46:58.392356Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2026-01-07T17:46:58.392724Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-07T17:46:58.394261Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:46:58.394418Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:46:58.394475Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:46:58.394548Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:46:58.394624Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:46:58.395857Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:46:58.395947Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:46:58.395983Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:46:58.396019Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 1 2026-01-07T17:46:58.396057Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:46:58.396141Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:46:58.399580Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2026-01-07T17:46:58.399797Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2026-01-07T17:46:58.399888Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-07T17:46:58.400410Z node 17 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-07T17:46:58.400735Z node 17 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2026-01-07T17:46:58.401042Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-07T17:46:58.401119Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-07T17:46:58.401300Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 
72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-07T17:46:58.401389Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-07T17:46:58.401515Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-07T17:46:58.401666Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 2 -> 3 2026-01-07T17:46:58.403052Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:46:58.403199Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:46:58.408493Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:46:58.408791Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:46:58.408886Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2026-01-07T17:46:58.409014Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2026-01-07T17:46:58.414150Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-07T17:46:58.414317Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-07T17:46:58.414408Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2026-01-07T17:46:58.414436Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:03.717155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:03.855345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:03.863045Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:03.864253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:03.864660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:03.864849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:03.866933Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:03.867327Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:03.867510Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:03.867656Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:04.330774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:04.466406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:04.466556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:04.467446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:04.467541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:04.515862Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:04.516777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:04.517181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:04.545188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:04.745661Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:05.548250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:05.548314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:05.548351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:05.548745Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:12.318546Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.318773Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.328041Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.329461Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.330928Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:616:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.331249Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.331448Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.333674Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:611:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.334000Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.334075Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.658366Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:12.770729Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.770860Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.771535Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.771624Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.816579Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:12.817429Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.817787Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.862604Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:12.888655Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:13.761702Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:13.761755Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:13.761786Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:13.762143Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:21.836567Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.837640Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.852102Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.855262Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.855551Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.856017Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:21.856233Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.858028Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.858185Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.858254Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.160425Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.258141Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.258299Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.258882Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#720 ... WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:31.207469Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:31.210294Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:503:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:31.210820Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:31.210992Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:31.213979Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:496:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:31.214404Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:31.214612Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:31.526531Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:31.626421Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.626570Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.627397Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.627493Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.673257Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-01-07T17:46:31.674180Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.674561Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.734421Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:31.749110Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:32.671335Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:32.671411Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:32.671472Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:32.672156Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:37.515079Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:37.521381Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:37.537268Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:37.537522Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:37.537706Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:37.908419Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:37.995236Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:37.995381Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:38.031111Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:38.087391Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:38.982449Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:38.982536Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:38.982585Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:38.983064Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:39.052973Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T17:46:49.823228Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:49.829272Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:49.831955Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:49.832338Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:49.832563Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:50.229358Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:50.315013Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.315190Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.360774Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:50.454639Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:51.371372Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:51.371446Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:51.371491Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:51.372043Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:56.024053Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:56.031141Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:56.048516Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:56.048827Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:56.049001Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:56.429270Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:56.527603Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:56.527721Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:56.566895Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:56.628671Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:57.442519Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:57.442577Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:57.442610Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:57.443321Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2026-01-07T17:46:52.670125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677598056221240:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:52.671536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:52.879645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:52.879755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:52.923619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-07T17:46:52.926385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:52.927925Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677598056221199:2081] 1767808012668763 != 1767808012668766 2026-01-07T17:46:52.936070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:53.108037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:53.121948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:53.121970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:53.121977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:53.122063Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:53.325591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:55.611256Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677613291468181:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:55.611307Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:55.632340Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:55.710836Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:55.712627Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677613291468155:2081] 1767808015609978 != 1767808015609981 2026-01-07T17:46:55.750900Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:55.750992Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:55.754768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:55.892214Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:55.976798Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:55.976820Z node 2 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:55.976827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:55.976904Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:56.282695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:56.387872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-07T17:46:56.437021Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:46:56.437085Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:46:56.454832Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677617586436436:2587] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::BridgeTimeDifference [GOOD] >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> YdbProxy::DescribeConsumer [GOOD] >> YdbProxy::CreateCdcStream [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> TStreamingQueryTest::ParallelCreateSameStreamingQuery >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:07.923660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:08.058895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:08.068377Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:08.069523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:08.069971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:08.070190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:08.072445Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:08.072831Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:08.073024Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:08.073178Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:08.521196Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:08.632404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:08.632564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:08.633388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:08.633478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:08.678546Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:08.679401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:08.679804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:08.814349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:08.880635Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:09.683961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:09.684054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:09.684090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:09.684537Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:15.987952Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:15.988921Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:15.991541Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:16.000556Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:16.001809Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:16.003536Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:16.004049Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:16.004280Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:16.006363Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:16.006415Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:16.375423Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:16.478845Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.478993Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:16.479728Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.479811Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:16.531028Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:16.531588Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:16.532008Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:16.595883Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:16.672084Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:17.558684Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:17.558766Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:17.558818Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:17.559292Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:25.691124Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:25.692235Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:25.706526Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:25.709699Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:25.709965Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:25.710444Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:25.710664Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:25.712587Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:25.712834Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:25.712977Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:26.047794Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:26.155194Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:26.155342Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:26.155875Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState ... for SysViewsRoster update finished (done) 2026-01-07T17:46:36.553831Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:36.553887Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:36.553913Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:36.554283Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-7" reason: "YELLOW-7932-1231c6b1-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 7 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:44.079219Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:44.079976Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:44.081555Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:44.093096Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:44.094815Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:44.095228Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:44.095322Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:44.097000Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:44.097056Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:44.437042Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:44.558567Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:44.558774Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:44.559645Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:44.559740Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:44.604833Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-07T17:46:44.605638Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:44.606119Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:44.681979Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:44.743100Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:45.559645Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:45.559717Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:45.559764Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:45.560268Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:50.791023Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:50.796050Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:50.842558Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.842948Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:50.843176Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:51.213936Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:51.328704Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:51.328882Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:51.375632Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:51.403017Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:52.459563Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:52.459639Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:52.459687Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:52.460210Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:57.507165Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:57.513685Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:57.532436Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:57.532787Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:57.532971Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:57.983763Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.099597Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.099796Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.135809Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.198685Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:59.234125Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:59.234200Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:59.234241Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:59.234719Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:03.988523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:04.109075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:04.124877Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:04.126125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:04.126564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:04.126782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:04.129021Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:04.129433Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:04.129631Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:04.129787Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:04.590752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:04.717393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:04.717521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:04.718319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:04.718406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:04.763605Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:04.764501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:04.764849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:04.841946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:04.996673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:05.791424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:05.791480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:05.791544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:05.791971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:12.733935Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.734211Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:12.741800Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.742813Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:12.743932Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:616:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.744206Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.744358Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.746114Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:611:2281], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:12.746333Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:12.746381Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:13.094005Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:13.187691Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:13.187805Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:13.188423Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:13.188492Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:13.234068Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:13.234671Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:13.234949Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:13.302057Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:13.314789Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:14.199772Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:14.199828Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:14.199873Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:14.200224Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:21.776814Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.777970Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.791838Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.794122Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.794425Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.794854Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:21.795057Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.796813Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:21.797065Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:21.797190Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.109988Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.222831Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.222971Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.224135Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#720 ... /delayed_requests } 2026-01-07T17:46:40.569836Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:40.569963Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:40.571834Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:40.571896Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:40.889491Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:41.004331Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:41.004512Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:41.005364Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:41.005444Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:41.050920Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-07T17:46:41.051867Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:41.052333Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:41.113790Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:41.186775Z node 10 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:42.049256Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:42.049323Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:42.049371Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:42.049832Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/ec9d/001dfa/r3tmp/tmpUigSqa/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/ec9d/001dfa/r3tmp/tmpUigSqa/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/ec9d/001dfa/r3tmp/tmpUigSqa/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:50.451570Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:50.452401Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:50.469453Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:695:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.470134Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:50.470557Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:50.470654Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:50.471013Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:50.471271Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:50.789934Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:50.892731Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.892887Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.893538Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:50.893652Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:50.971721Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-07T17:46:50.972624Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:50.973139Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:51.015927Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:51.092519Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:52.035304Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:52.035368Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:52.035412Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:52.035913Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:57.669774Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:57.676721Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:57.727514Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:57.727865Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:57.728028Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:58.135816Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.243956Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.244133Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.283031Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.357696Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:59.361665Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:59.361738Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:59.361806Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:59.362675Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration >> TStreamingQueryTest::CreateStreamingQueryWithProperties >> TStreamingQueryTest::CreateStreamingQuery |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> YdbProxy::DescribeTable [GOOD] >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableConfig >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] Test command err: 2026-01-07T17:45:57.435652Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677362846842374:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:45:57.439311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:45:57.744102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:45:57.744223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:45:57.761737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:45:57.801308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:57.808760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:45:57.977058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:45:57.977088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:45:57.977095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:45:57.977178Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:45:57.984708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:45:58.210121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:07.285333Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:07.301711Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:07.305424Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:07.315910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:07.319003Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:07.319922Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:07.320307Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:07.320469Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:07.322320Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:07.322473Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:07.668327Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:07.778285Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:07.778422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:07.780191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:07.780308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:07.828968Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:46:07.829519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:07.829955Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:07.908859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:07.935292Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:08.839666Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:08.839747Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:08.839801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:08.840380Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:16.522286Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:16.523246Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:16.524601Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:16.531614Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:16.533309Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:16.534433Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:302:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:16.534800Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:16.534960Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:16.535993Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:16.536105Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:16.867410Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:16.968112Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.968262Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:16.968690Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:16.968755Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:17.046697Z node 4 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:46:17.047233Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:17.047659Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:17.140266Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:17.168142Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:18.121123Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:18.121209Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:18.121264Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:18.122288Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status ... 897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:29.292646Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:29.330089Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:29.408647Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:30.256194Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:30.256261Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:30.256307Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:30.256739Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:30.316463Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T17:46:41.462717Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:41.467168Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:41.469765Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:41.470150Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:41.470323Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:41.840528Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:41.937150Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:41.937293Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:41.984555Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:42.081870Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:43.010058Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:43.010131Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:43.010172Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:43.010631Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.148801Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:58.153053Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:58.158370Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:58.159231Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:58.189748Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:58.196603Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:46:58.196672Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:46:58.279978Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:58.280067Z node 13 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:58.280117Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:58.283526Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1225:2374], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:58.285816Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:58.286432Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:58.291744Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:778:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:58.292279Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:58.292442Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [16:783:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:58.292645Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:58.292849Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:58.293034Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:58.294134Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:550:2142], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:58.294703Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:58.294762Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:58.646701Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.699107Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.699297Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.706786Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:37:2084] 1767808004333546 != 1767808004333551 2026-01-07T17:46:58.716269Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.724138Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.724259Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.730261Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.730387Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.731641Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.731727Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.746152Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [15:117:2075] 1767808004349966 != 1767808004349969 2026-01-07T17:46:58.747512Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:157:2075] 1767808004355890 != 1767808004355894 2026-01-07T17:46:58.748212Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:77:2075] 1767808004344004 != 1767808004344008 2026-01-07T17:46:58.783347Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-07T17:46:58.783808Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-07T17:46:58.784163Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-07T17:46:58.784529Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.785126Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.785282Z node 13 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.785397Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TStreamingQueryTest::DropStreamingQueryTwice >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2026-01-07T17:46:55.003651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677606452906420:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:55.004067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:55.227130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:55.227244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:55.271499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:55.275711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:55.314551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:55.318673Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677606452906269:2081] 1767808014985619 != 1767808014985622 2026-01-07T17:46:55.513909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:55.513929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:55.513939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:55.514015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:55.521064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:55.717682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:55.756088Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677610747874395:2489] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 
2026-01-07T17:46:58.160746Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677624584610999:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:58.161265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:58.196393Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:58.278674Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.280716Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.280786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.303298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.449504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:58.517780Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:58.517801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:58.517807Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:58.517882Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.784829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2026-01-07T17:46:53.887058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677603863985713:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:53.887134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:54.101387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:54.107008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:54.107151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:54.153142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:54.168125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677603863985679:2081] 1767808013886123 != 1767808013886126 2026-01-07T17:46:54.187333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:54.262885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:54.399748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:54.399773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:54.399784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:54.399865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:54.629188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:54.896819Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:56.719671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677616748888460:2504] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2026-01-07T17:46:56.731241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:56.887198Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677616748888545:2563] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:57.599779Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677621650830133:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:57.600967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:57.632091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:57.702168Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:57.704437Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677621650830089:2081] 1767808017597854 != 1767808017597857 2026-01-07T17:46:57.738426Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:57.738536Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:57.741892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:57.873772Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:57.939836Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:57.939858Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:57.939865Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:57.939935Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.210985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:58.216399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:46:58.617097Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:00.757550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:00.858327Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7592677634535733042:2333] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId 
Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-07T17:47:00.893312Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677634535733093:2639] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:59.476445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:59.476551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:59.476608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:59.476646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:59.476686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:59.476716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:59.476773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:59.476860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:59.477783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:59.478043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:59.548329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: 
Cannot subscribe to console configs 2026-01-07T17:46:59.548384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:59.563285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:59.563577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:59.563757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:59.569659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:59.569935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:59.570519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:59.570794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:59.572967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:59.573145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:59.574076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:59.574121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:59.574215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:59.574260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:59.574291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:59.574461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:59.721440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.722995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.723565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
emeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:02.068334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.068471Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:02.068648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.068689Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:02.068729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:02.068758Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:02.070184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.070222Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:02.070253Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:02.071528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.071565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.071599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:02.071630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:02.071738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:02.072882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:02.072999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:47:02.073223Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 
5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.073301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:02.073337Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:02.073509Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:02.073542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:02.073652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:02.073702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:02.074978Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:02.075011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:02.075195Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.075232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:47:02.075298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.075332Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:47:02.075429Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:02.075459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:02.075501Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:02.075536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:02.075570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:47:02.075603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:02.075641Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:47:02.075669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:47:02.075723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:02.075759Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:47:02.075790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:47:02.076453Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:02.076558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:02.076598Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:47:02.076645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:47:02.076679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:02.076774Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:02.079336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:02.079762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:47:02.080155Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:682:2671] Bootstrap 2026-01-07T17:47:02.080997Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:682:2671] Become StateWork (SchemeCache [2:687:2676]) 2026-01-07T17:47:02.083369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:02.083522Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2026-01-07T17:47:02.083579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] 
TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2026-01-07T17:47:02.083644Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-07T17:47:02.084637Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:682:2671] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:47:02.086822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:02.087007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE STREAMING QUERY, path: 2026-01-07T17:47:02.087310Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryWithProperties [GOOD] >> TStreamingQueryTest::DropStreamingQuery >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2026-01-07T17:46:54.859161Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677609821088619:2099];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:54.859874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:55.077654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:55.081029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:55.081152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:55.107472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:55.140997Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677609821088547:2081] 1767808014849336 != 1767808014849339 2026-01-07T17:46:55.149360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:55.370151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:55.397590Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:55.397610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:55.397616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:55.397712Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:55.620806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:55.683568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:46:58.187059Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677623563970839:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:58.187109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:58.215396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:58.298040Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.301546Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677623563970805:2081] 1767808018184190 != 1767808018184193 2026-01-07T17:46:58.334871Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.334961Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.342875Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.438500Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:58.544007Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:58.544022Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:58.544029Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:58.544111Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.786579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:59.195609Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:01.290185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TStreamingQueryTest::CreateStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:01.859482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:01.859596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:01.859655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:01.859704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:01.859741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:01.859770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:01.859831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:01.859903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:01.860813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:01.861119Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:01.949200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:01.949263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:01.963966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:01.964299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:01.964477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:01.970557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:01.970883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:01.971614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:01.971878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:01.974299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:01.974494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:01.975603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:01.975667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:01.975780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:01.975828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:01.975873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:01.976033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.131641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.132656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.132776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.132853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.132961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.133743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
escribe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:02.780805Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 235us result status StatusSuccess 2026-01-07T17:47:02.781226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:02.781752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:02.781896Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 145us result status StatusSuccess 2026-01-07T17:47:02.782193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2026-01-07T17:47:02.782517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:02.782585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:02.782712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:02.782754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:47:02.782831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:02.782871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:02.783462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:02.783561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:02.783651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:02.783696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:708:2697] 2026-01-07T17:47:02.783908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:02.783960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:02.783987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:708:2697] 2026-01-07T17:47:02.784121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:02.784151Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:708:2697] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:02.784690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:02.784900Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 224us result status StatusSuccess 2026-01-07T17:47:02.785248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-07T17:47:02.788431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "NilNoviSubLuna" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:02.788730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 104:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "NilNoviSubLuna" } 2026-01-07T17:47:02.788814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 104:0, path# /MyRoot/NilNoviSubLuna 
2026-01-07T17:47:02.788968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:47:02.791624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:02.791859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: NilNoviSubLuna TestModificationResult got TxId: 104, wait until txId: 104 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] >> TStreamingQueryTest::ParallelAlterStreamingQuery >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TStreamingQueryTest::ParallelCreateStreamingQuery >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> TStreamingQueryTest::DropStreamingQuery [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:31.460579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:31.460656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:31.460690Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:31.460717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:31.460749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:31.460775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:31.460820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:31.460881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:31.461617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:31.461868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:31.525567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:31.525617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:31.538286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:31.538556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:31.538735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:31.544951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:31.545323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:31.546198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:31.546438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:31.549344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:31.549580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:31.550672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:31.550721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:31.550809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:31.550848Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:31.550884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:31.551084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:31.693465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.694963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:31.695643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... txId: 102, path id: 1 2026-01-07T17:47:02.815130Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [16:212:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-07T17:47:02.815706Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.815786Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:47:02.817378Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:02.817519Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:02.817589Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:02.817664Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:47:02.817747Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:02.819135Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:02.819209Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:02.819234Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:02.819265Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:02.819302Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:47:02.819384Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:47:02.820653Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1731 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:47:02.820690Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:47:02.820841Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1731 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:47:02.821051Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1731 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-07T17:47:02.822580Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 736 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:02.822650Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:47:02.822852Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 736 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:02.822956Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:02.823122Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 736 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:02.823247Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 
1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.823310Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.823384Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:47:02.823454Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:47:02.824542Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:02.826309Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:02.826439Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.827493Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.827898Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.827962Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:02.828173Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:02.828238Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:02.828314Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:02.828373Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:02.828441Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:47:02.828539Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:705:2694] message: TxId: 102 2026-01-07T17:47:02.828622Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:02.828707Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:02.828767Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:02.828967Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:02.830765Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:02.830840Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [16:765:2742] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-07T17:47:02.834585Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" PartitionConfig { ChannelProfileId: 0 } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:02.834864Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.835250Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, at schemeshard: 72057594046678944 2026-01-07T17:47:02.837914Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Setting ChannelProfileId to 0 for tables with storage config is not allowed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:02.838294Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2026-01-07T17:46:49.078725Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677584683362420:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:49.078925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:49.114293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:49.328104Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:49.330870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:49.330990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:49.333755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:46:49.338158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:49.495037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677584683362584:2143] Handle TEvNavigate describe path dc-1 2026-01-07T17:46:49.495140Z node 1 
:TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677584683363033:2455] HANDLE EvNavigateScheme dc-1 2026-01-07T17:46:49.495304Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677584683362609:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:46:49.495406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592677584683362925:2372][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592677584683362609:2157], cookie# 1 2026-01-07T17:46:49.497247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677584683362937:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362934:2372], cookie# 1 2026-01-07T17:46:49.497292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677584683362938:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362935:2372], cookie# 1 2026-01-07T17:46:49.497306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592677584683362939:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362936:2372], cookie# 1 2026-01-07T17:46:49.497331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677580388394940:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362938:2372], cookie# 1 2026-01-07T17:46:49.497333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677580388394937:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362937:2372], cookie# 1 2026-01-07T17:46:49.497361Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592677580388394943:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592677584683362939:2372], cookie# 1 2026-01-07T17:46:49.497419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677584683362938:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677580388394940:2054], cookie# 1 2026-01-07T17:46:49.497477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677584683362937:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677580388394937:2051], cookie# 1 2026-01-07T17:46:49.497500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592677584683362939:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677580388394943:2057], cookie# 1 2026-01-07T17:46:49.497543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677584683362925:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677584683362935:2372], cookie# 1 2026-01-07T17:46:49.497597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592677584683362925:2372][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:46:49.497623Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677584683362925:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677584683362934:2372], cookie# 1 2026-01-07T17:46:49.497644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592677584683362925:2372][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:46:49.497694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592677584683362925:2372][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592677584683362936:2372], cookie# 1 2026-01-07T17:46:49.497713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592677584683362925:2372][/dc-1] Sync cookie mismatch: sender# [1:7592677584683362936:2372], cookie# 1, current cookie# 0 2026-01-07T17:46:49.497730Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592677584683362609:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:46:49.503276Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592677584683362609:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592677584683362925:2372] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:46:49.503418Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592677584683362609:2157], cacheItem# { Subscriber: { Subscriber: [1:7592677584683362925:2372] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:46:49.506104Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592677584683363034:2456], recipient# [1:7592677584683363033:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:46:49.506201Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677584683363033:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:46:49.530349Z 
node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677584683363033:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:46:49.532852Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677584683363033:2455] Handle TEvDescribeSchemeResult Forward to# [1:7592677584683363032:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:46:49.534341Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677584683362584:2143] Handle TEvProposeTransaction 2026-01-07T17:46:49.534359Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677584683362584:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:46:49.534411Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677584683362584:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677584683363040:2461] 2026-01-07T17:46:49.608292Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592677584683362609:2157], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073 ... 
tem# { Subscriber: { Subscriber: [3:7592677598855392221:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:00.744439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677633215152397:9599], recipient# [3:7592677633215152396:7395], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:00.752556Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592677585970490186:2241], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:00.752756Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592677585970490186:2241], cacheItem# { Subscriber: { Subscriber: [3:7592677598855392224:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:00.752869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592677633215152399:9600], recipient# [3:7592677633215152398:7396], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:00.974444Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677622724493098:2230], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: 
dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:00.974576Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677622724493098:2230], cacheItem# { Subscriber: { Subscriber: [2:7592677635609395159:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:00.974627Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677622724493098:2230], cacheItem# { Subscriber: { Subscriber: [2:7592677635609395160:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:00.975806Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677635609395273:2343], recipient# [2:7592677635609395270:2552], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:00.976702Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:47:01.660179Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677622724493098:2230], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:01.660329Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677622724493098:2230], cacheItem# { Subscriber: { Subscriber: [2:7592677622724493144:2239] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist 
Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:01.660404Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677622724493098:2230], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:01.660475Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677639904362572:2344], recipient# [2:7592677639904362570:2555], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:01.660478Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677622724493098:2230], cacheItem# { Subscriber: { Subscriber: [2:7592677622724493144:2239] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:01.660542Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677639904362573:2345], recipient# [2:7592677639904362571:2556], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:01.669057Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7592677622724493098:2230], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:47:01.669201Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7592677622724493098:2230], cacheItem# { Subscriber: { Subscriber: [2:7592677635609395186:2305] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:47:01.669292Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7592677639904362575:2346], recipient# [2:7592677639904362574:2557], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:02.447535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:02.447653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.447704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:02.447751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:02.447792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:02.447818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:02.447867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.447936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:02.448791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:02.449321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:02.534269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:02.534318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:02.553836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:02.554137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:02.554316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:02.560156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:02.560470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:02.561100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.561354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:02.564012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.564222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:02.565365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:02.565417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.565527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:02.565576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:02.565622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:02.565788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.684025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.684775Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.684854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.684918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.685546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2026-01-07T17:47:03.640592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-07T17:47:03.640735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:03.641345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.641436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.641473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:03.641508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:47:03.641543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:47:03.642148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.642217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.642256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:03.642292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 2 2026-01-07T17:47:03.642324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:47:03.642406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-07T17:47:03.644677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-07T17:47:03.644852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:47:03.646060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 2026-01-07T17:47:03.647017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:03.647131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:03.647177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 105:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000040, at schemeshard: 72057594046678944 2026-01-07T17:47:03.647309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-07T17:47:03.647467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:47:03.647544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:47:03.647795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:03.649327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:03.649387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:03.649565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:47:03.649650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:03.649681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:861:2818], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-07T17:47:03.649724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:861:2818], at schemeshard: 72057594046678944, txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2026-01-07T17:47:03.650115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.650159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:47:03.650269Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:47:03.650310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:47:03.650364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:47:03.650392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:47:03.650422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-07T17:47:03.650457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:47:03.650489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:47:03.650520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:47:03.650591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:47:03.650631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-07T17:47:03.650669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2026-01-07T17:47:03.650699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-07T17:47:03.651181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.651271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.651307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:03.651345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2026-01-07T17:47:03.651396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:47:03.652061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.652155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:03.652186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:03.652216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:47:03.652240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:47:03.652296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:47:03.654859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:03.654949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:02.161268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:02.161383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.161426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:02.161472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:02.161515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:02.161544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:02.161595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.161662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:02.162511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:02.162791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:02.236826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:02.236876Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:02.249575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:02.249879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:02.250089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:02.258519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:02.258846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:02.259522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.259753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:02.262387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.262565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:02.263617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:02.263672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.263780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:02.263832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:02.263872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:02.264051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.400383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:02.401244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.401918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-07T17:47:03.980382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:03.980487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:03.980539Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:47:03.980633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:03.980729Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:47:03.980868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:03.980942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:03.981328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:03.982673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:03.983401Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:03.983430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:03.983523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:03.983607Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:03.983641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:47:03.983665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:47:03.983913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.983955Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:03.984057Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:03.984090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:03.984130Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:03.984178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:03.984212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:47:03.984253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:03.984287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:03.984321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:03.984383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:03.984425Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:47:03.984475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:47:03.984506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:47:03.984798Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:03.984864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:03.984891Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:03.984949Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:47:03.984990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:03.985275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:03.985318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:03.985384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:03.985545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:03.985600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:03.985629Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:03.985654Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:47:03.985685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:03.985754Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:47:03.988626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:03.988733Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:03.988778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:47:03.988963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:03.989005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:03.989375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:03.989492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:03.989528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:740:2729] TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:03.989955Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:03.990120Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 190us result status StatusPathDoesNotExist 2026-01-07T17:47:03.990316Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyStreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |92.7%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] |92.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:02.164364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:02.164441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.164471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:02.164503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:02.164531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:02.164551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:02.164586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.164641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:02.165449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:02.165682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:02.232695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:02.232732Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:02.245607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:02.245849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:02.246014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:02.252998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:02.253375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:02.254258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.254562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:02.257528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.257733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:02.259008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:02.259066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.259182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:02.259232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:02.259301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:02.259491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:02.418023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419367Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.419957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.420031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.420111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:02.420203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :47:04.200213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:04.202229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.202293Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:04.202335Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:04.203924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.203979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.204023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.204071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.204212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:04.206120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:04.206267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:47:04.206598Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:04.206728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:04.206775Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.207030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:04.207084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.207253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:04.207327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:04.209130Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:04.209178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:04.209415Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.209465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:47:04.209541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.209591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:47:04.209695Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:04.209730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.209784Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part 
operation is done id#1:0 progress is 1/1 2026-01-07T17:47:04.209824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.209863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:47:04.209907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.209948Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:47:04.209984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:47:04.210053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:04.210100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:47:04.210135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:47:04.210943Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:04.211065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:04.211111Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:47:04.211151Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:47:04.211194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:04.211287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:04.214076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:04.214543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:47:04.215573Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:682:2671] Bootstrap 2026-01-07T17:47:04.216763Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:682:2671] Become StateWork (SchemeCache [2:687:2676]) 2026-01-07T17:47:04.219561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:04.219765Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-07T17:47:04.219895Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-07T17:47:04.221401Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:682:2671] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:47:04.223865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:04.224088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER STREAMING QUERY, path: MyStreamingQuery 2026-01-07T17:47:04.224488Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:47:04.224695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:04.224753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:04.225139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:04.225231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:04.225269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:697:2686] TestWaitNotification: OK eventTxId 101 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:28.789779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:28.789853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:28.789886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:28.789912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:28.789944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:28.789998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:28.790073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:28.790163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:28.791028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:28.791278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:28.864739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:28.864789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:28.876659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:28.877018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:28.877225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:28.885680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:28.885988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:28.886550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:28.886737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-07T17:46:28.888919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:28.889174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:28.890337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:28.890402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:28.890536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:28.890586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:28.890629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:28.890830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:29.027148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.027842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.027917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.027960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:29.028517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-07T17:47:03.855526Z node 12 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-07T17:47:03.866541Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:03.866782Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.866995Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:03.867049Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:03.867242Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:03.867323Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:03.869547Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:03.869746Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2026-01-07T17:47:03.869965Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.870036Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:03.870085Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:03.870123Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:03.871715Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.871766Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:03.871820Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:03.873157Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.873199Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.873253Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:03.873310Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:03.873461Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:03.874635Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:03.874778Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:47:03.875092Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:03.875212Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 51539609711 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:03.875262Z node 12 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:03.875536Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:03.875592Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:03.875796Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:03.875867Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:03.877480Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:03.877533Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:03.877753Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:03.877799Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:47:03.878083Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.878136Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:47:03.878277Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:03.878323Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:03.878367Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:03.878402Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:03.878451Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:47:03.878506Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:03.878543Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:47:03.878576Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:47:03.878647Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:03.878695Z 
node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:47:03.878741Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:47:03.879360Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:03.879476Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:03.879536Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:47:03.879587Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:47:03.879638Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:03.879732Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:03.882330Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:03.882886Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:03.885860Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:684:2673] Bootstrap 2026-01-07T17:47:03.887187Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:684:2673] Become StateWork (SchemeCache [12:690:2679]) 2026-01-07T17:47:03.888272Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:684:2673] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:47:03.890331Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:02.566054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:02.722445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:02.731158Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:02.732386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:02.732801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:02.733059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:02.735164Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:02.735551Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:02.735705Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:02.735869Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:03.180582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:03.318058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.318241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.319170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:03.319270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:03.367375Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:03.368292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.373013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:03.514077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:03.636266Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:04.462873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:04.462962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:04.463030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:04.463569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-7932-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-1" reason: "YELLOW-7932-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-edf5-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-07T17:46:11.783830Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:11.784178Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:11.797434Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:680:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:11.797822Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:11.798118Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:11.799017Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:11.800888Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:675:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:11.801129Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:11.801388Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:12.148225Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:12.253166Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.253307Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.253853Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:12.253934Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:12.317569Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:12.318547Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.318837Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:12.380170Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:12.434996Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:13.310470Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:13.310537Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:13.310581Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:13.311144Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-07T17:46:21.304388Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.305517Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:21.319416Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.322386Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:21.322612Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026- ... cting -> Connected 2026-01-07T17:46:39.175926Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:39.216345Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:39.974444Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:39.974484Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:39.974502Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:39.974817Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:47.583914Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:47.585072Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:47.605090Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:695:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:47.605884Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:47.606483Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:47.606594Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:47.607004Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:47.607324Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:47.930299Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:48.028004Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:48.028174Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:48.028776Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:48.028863Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:48.098776Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-07T17:46:48.099451Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:48.099858Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:48.184850Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:48.211684Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:49.024922Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:49.025026Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:49.025055Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:49.025478Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:57.492605Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:760:2348], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:57.494430Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:57.495590Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:57.507219Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:57.509128Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:57.510782Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:765:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:57.511345Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:57.511611Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:57.513542Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:57.513704Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:57.898198Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:58.029619Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.029794Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.030354Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:58.030443Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:58.103934Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-07T17:46:58.104636Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.105104Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:58.207993Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:58.249611Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:02.496799Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:02.505702Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:02.505764Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:02.505809Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:02.507567Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:02.525239Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:02.525397Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:02.569087Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-07T17:47:02.570279Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" reason: "YELLOW-7932-1231c6b1-15" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 13 host: "::1" port: 12001 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:02.763404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:02.763515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.763567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:02.763609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:02.763650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:02.763681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:02.763743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:02.763838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:02.764821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:02.765125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:02.849355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:02.849406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:02.862172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:02.862419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:02.862628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:02.868597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:02.868997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:02.869818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:02.870084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:02.872784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.873107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:02.874286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:02.874346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:02.874487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-07T17:47:02.874536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:02.874580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:02.874766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:03.018503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.019610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.019745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.019844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.019930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:03.020823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:04.692106Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:04.693540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.693583Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:04.693616Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:04.694763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.694804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.694840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.694894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.695021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:04.696072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:04.696171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:47:04.696390Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:04.696486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:04.696530Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.696717Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:04.696755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:04.696939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:04.697014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:04.698385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:04.698422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:04.698554Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.698588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:47:04.698653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.698699Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:47:04.698774Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:04.698802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.698846Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:04.698873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.698901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:47:04.698934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:04.698960Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:47:04.698983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:47:04.699029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:04.699057Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:47:04.699102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:47:04.699619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:04.699700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:04.699744Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:47:04.699774Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:47:04.699806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:04.699876Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:04.701853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:04.702183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:47:04.702476Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:682:2671] Bootstrap 2026-01-07T17:47:04.703275Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:682:2671] Become StateWork (SchemeCache [2:687:2676]) 2026-01-07T17:47:04.705443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:04.705586Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-07T17:47:04.705702Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-07T17:47:04.706680Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:682:2671] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:47:04.708941Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:04.709149Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP STREAMING QUERY, path: MyStreamingQuery 2026-01-07T17:47:04.709609Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:47:04.709854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:04.709899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:04.710262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:04.710353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:04.710389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:697:2686] TestWaitNotification: OK eventTxId 101 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:04.250074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:04.250150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:04.250185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:04.250224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:04.250253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-07T17:47:04.250271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:04.250302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:04.250355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:04.250996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:04.251218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:04.319995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:04.320040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:04.331574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:04.331831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:04.331990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:04.338114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:04.338415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:04.339057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:04.339325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:04.342020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.342203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:04.343169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:04.343217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.343304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:04.343343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:04.343379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:04.343567Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.464559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.465914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... th "/MyRoot/DirA/MyStreamingQuery1" took 160us result status StatusSuccess 2026-01-07T17:47:05.070684Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:05.071194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:05.071365Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 160us result status StatusSuccess 2026-01-07T17:47:05.071568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:05.072059Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:05.072213Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 129us result status StatusSuccess 2026-01-07T17:47:05.072519Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:05.072940Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:05.073085Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 162us result status StatusSuccess 2026-01-07T17:47:05.073346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:05.073861Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:05.074037Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 199us result status StatusSuccess 2026-01-07T17:47:05.074368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:04.208037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:04.208141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:04.208192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:04.208238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:04.208272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:04.208300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:04.208364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:04.208431Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:04.209324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:04.209584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:04.296893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:04.296972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:04.312523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:04.312896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:04.313116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:04.319512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:04.319871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:04.320524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:04.320825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:04.323437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.323651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:04.324792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:04.324847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:04.324992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:04.325037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:04.325086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:04.325262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:04.468732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.469711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.469830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.469912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:04.470806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
r txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.225271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.225345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.225467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.225536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.225551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.225598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.225806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.225879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-07T17:47:05.225937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.225950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 
2026-01-07T17:47:05.226183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-07T17:47:05.226795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.226903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.226994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got 
EvNotifyTxCompletionResult 2026-01-07T17:47:05.227016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.227064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.227095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.227188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.227203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:794:2783] 2026-01-07T17:47:05.227259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-07T17:47:05.227271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:794:2783] TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 2026-01-07T17:47:05.229500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:05.229686Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 186us result status StatusSuccess 2026-01-07T17:47:05.229996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> YdbProxy::AlterTopic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:23.090452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:23.197063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:23.203996Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:23.204337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:23.204474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:23.583806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:23.721050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:23.721196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:23.758496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:23.851010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:24.585630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:24.586718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:24.586835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:24.586883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:24.587068Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:24.660226Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:25.257428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:28.343973Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:28.351610Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:28.355727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:28.390680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:46:28.390811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:28.430966Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:28.433203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:28.627442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:28.627551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:28.628892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.629596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.630200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631023Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631210Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631522Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631722Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.631921Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:28.647032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:28.825358Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:28.882229Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:28.882355Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:28.916753Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:28.918030Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:28.918298Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:28.918359Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:28.918399Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:28.918448Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:28.918508Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:28.918553Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:28.919119Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:28.921015Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:46:28.923196Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:46:28.932245Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:46:28.932304Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:46:28.932401Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:46:28.938729Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:28.938831Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:28.969999Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:46:28.970507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:46:28.973523Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:29.009069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:29.022689Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:29.022848Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:29.034548Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:29.207589Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:29.263065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:46:29.641456Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:46:29.771846Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:29.771942Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:46:30.474262Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 7 2026-01-07T17:47:03.704389Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-07T17:47:03.704414Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767808023590530 2026-01-07T17:47:03.704442Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-07T17:47:03.704466Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-07T17:47:03.704492Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-07T17:47:03.704545Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-07T17:47:03.704587Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:47:03.704672Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-07T17:47:03.704716Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:47:03.704757Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:47:03.704807Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:47:03.704911Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:47:03.705989Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:47:03.706733Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:47:03.706803Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:47:03.706898Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5571:4779] Owner: [2:5570:4778]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:47:03.706955Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5571:4779] Owner: [2:5570:4778]. 
Column diff is empty, finishing 2026-01-07T17:47:03.708006Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:47:03.708072Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:47:03.709580Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:47:03.726226Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5580:4786] 2026-01-07T17:47:03.726499Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5580:4786], schemeshard id = 72075186224037897 2026-01-07T17:47:03.726595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5581:4787] 2026-01-07T17:47:03.726671Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5537:4759], server id = [2:5581:4787], tablet id = 72075186224037894, status = OK 2026-01-07T17:47:03.726787Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5581:4787], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-07T17:47:03.786827Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:47:03.786964Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-07T17:47:03.787657Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5586:4792], server id = [2:5590:4796], tablet id = 72075186224037899, status = OK 2026-01-07T17:47:03.787925Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5586:4792], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:03.788203Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5587:4793], server id = [2:5591:4797], tablet id = 72075186224037900, status = OK 2026-01-07T17:47:03.788244Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5587:4793], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:03.788900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5588:4794], server id = [2:5592:4798], tablet id = 72075186224037901, status = OK 2026-01-07T17:47:03.788974Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5588:4794], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:03.789377Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5589:4795], server id = [2:5593:4799], tablet id = 72075186224037902, status = OK 2026-01-07T17:47:03.789431Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5589:4795], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:03.793955Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:47:03.794330Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5586:4792], server id = [2:5590:4796], tablet id = 72075186224037899 2026-01-07T17:47:03.794364Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:03.794814Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:47:03.795148Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5587:4793], server id = [2:5591:4797], tablet id = 72075186224037900 2026-01-07T17:47:03.795168Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:03.795349Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:47:03.795685Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5588:4794], server id = [2:5592:4798], tablet id = 72075186224037901 2026-01-07T17:47:03.795706Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:03.796173Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:47:03.796222Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:47:03.796404Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:47:03.796558Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:47:03.796740Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5606:4808], ActorId: [2:5607:4809], Starting query actor #1 [2:5608:4810] 2026-01-07T17:47:03.796781Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5607:4809], ActorId: [2:5608:4810], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:47:03.799170Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5589:4795], server id = [2:5593:4799], tablet id = 72075186224037902 2026-01-07T17:47:03.799194Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:03.799667Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5607:4809], ActorId: [2:5608:4810], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2Y3M2MyZDUtZDI4NTYyLTliNzhkZjE0LTNmM2NmZGU4, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:47:03.896384Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5617:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:03.896664Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:47:03.896725Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5617:4819], StatRequests.size() = 1 2026-01-07T17:47:04.023459Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5607:4809], ActorId: [2:5608:4810], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2Y3M2MyZDUtZDI4NTYyLTliNzhkZjE0LTNmM2NmZGU4, TxId: 2026-01-07T17:47:04.023552Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5607:4809], ActorId: [2:5608:4810], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2Y3M2MyZDUtZDI4NTYyLTliNzhkZjE0LTNmM2NmZGU4, TxId: 2026-01-07T17:47:04.023971Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5606:4808], ActorId: [2:5607:4809], Got response [2:5608:4810] SUCCESS 2026-01-07T17:47:04.024268Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:04.060457Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:47:04.060520Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:47:04.151799Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5643:4827]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:04.152178Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:04.152236Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:47:04.152575Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:04.152694Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:47:04.152784Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:47:04.190839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 |92.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema |92.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2026-01-07T17:46:56.020720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677615127809816:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:56.020890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:56.261077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:56.269179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:56.269285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:56.311282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:56.344292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:56.518525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:56.565742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:56.565766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:56.565790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:56.565878Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:56.808131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:57.034369Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:58.690953Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677623717745254:2501] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-07T17:46:58.710550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:46:59.530852Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677628103702399:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:59.530912Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:59.542121Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:59.626397Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:59.663356Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:59.663593Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:59.671144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:59.671993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:59.854521Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:59.854544Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:59.854551Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:59.854633Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:00.103898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:00.535889Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:02.585217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:02.624020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:03.408503Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677646418690028:2077];send_to=[0:7307199536658146131:7762515]; 
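
The node-3 phase of this test, shown below, ends with a TX_PROXY error "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s". As a rough illustration only (not taken from the test source), a statement of the following shape would be rejected with that error, since a 365-day retention period exceeds the 31-day maximum reported by the schemeshard; the topic name is hypothetical:

    -- Hypothetical topic name; 365 days (31536000s) is above the allowed
    -- maximum of 2678400s (31 days), so the request is expected to fail.
    ALTER TOPIC `my_topic` SET (retention_period = Interval('P365D'));
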
2026-01-07T17:47:03.408547Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:03.419820Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:03.485364Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:03.524698Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:03.524760Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:03.527894Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:03.706939Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:03.707981Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:03.708004Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:03.708021Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:03.708099Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:03.976979Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:04.076866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-07T17:47:04.093308Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592677650713658249:2578] txid# 281474976710660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> YdbProxy::ReadNonExistentTopic [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion |92.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] |92.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> 
AutoConfig::GetServicePoolsWith2CPUs [GOOD] |92.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |92.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |92.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |92.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |92.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |92.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:27.689709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:27.689816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.689861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:27.689897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:27.689932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:27.689957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:27.690015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.690083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:27.690858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:27.691101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:27.778401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:46:27.778454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.794342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:27.794737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:27.794943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:27.804372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:27.804779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:27.805602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:27.805877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:27.813642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.813925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:27.815219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:27.815285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.815427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:27.815484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:27.815529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:27.815731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.977653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.978693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.978812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.978887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.978979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.979803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:07.237287Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:07.237320Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:07.239037Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:07.239274Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409548 2026-01-07T17:47:07.241220Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:07.241637Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:07.242275Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:07.242478Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:07.242587Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:07.242714Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:07.243004Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:07.244096Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:07.244322Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:07.245504Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 
72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2026-01-07T17:47:07.245801Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:07.246001Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:07.246891Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:07.246975Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:07.247085Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2026-01-07T17:47:07.251117Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:07.251211Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:07.252889Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:07.252960Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:07.253043Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:07.253073Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:07.253305Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:07.253372Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:07.254370Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:47:07.254749Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:07.254822Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:07.255390Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:07.255525Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 
103: got EvNotifyTxCompletionResult 2026-01-07T17:47:07.255590Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:537:2490] TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:07.256285Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:07.256579Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 322us result status StatusPathDoesNotExist 2026-01-07T17:47:07.256830Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2026-01-07T17:47:07.257443Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-07T17:47:07.257536Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2026-01-07T17:47:07.257625Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2026-01-07T17:47:07.257668Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2026-01-07T17:47:07.258232Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:07.258489Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 268us result status StatusSuccess 2026-01-07T17:47:07.259054Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:43:35.681774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:43:35.750726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:43:35.762666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:43:35.762962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:43:35.763004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:43:35.961375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:43:35.961479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:43:36.013582Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767807813297507 != 1767807813297511 2026-01-07T17:43:36.021389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:36.062999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:43:36.152630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:43:36.442990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.443061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.443102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.443225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-07T17:43:36.443264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:43:36.552226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-07T17:43:36.552474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.552680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:43:36.552727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:43:36.552915Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:43:36.553010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:36.553104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.553744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:43:36.553880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:43:36.553928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.553960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.554130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.554183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.554293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.554378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:43:36.554426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:43:36.554462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:43:36.554564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.555049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.555089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.555216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.555255Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.555310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.555365Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:43:36.555419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:43:36.555490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.555834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.555867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-07T17:43:36.555969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.556007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T17:43:36.556053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.556084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:43:36.556137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-07T17:43:36.556177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T17:43:36.556221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:43:36.559624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:43:36.560095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:43:36.560146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:43:36.560255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:43:36.560461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-07T17:43:36.560845Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.560898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:43:36.560952Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:43:36.561049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-07T17:43:36.561091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-07T17:43:36.561148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... 337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.332009Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.332045Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.342655Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:47:05.342752Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:47:05.342791Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-01-07T17:47:05.342880Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2026-01-07T17:47:05.342920Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-07T17:47:05.343053Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 60 shard idx 72057594046644480:11 data size 271 row count 13 2026-01-07T17:47:05.343131Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 60], pathId map=SequenceTable, is column=0, is olap=0, RowCount 13, DataSize 271, with borrowed parts 2026-01-07T17:47:05.343171Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2026-01-07T17:47:05.343292Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2026-01-07T17:47:05.343355Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 68 shard idx 72057594046644480:17 data size 368 row count 16 2026-01-07T17:47:05.343403Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 68], pathId map=SequenceTable, is column=0, is olap=0, RowCount 16, DataSize 368, with borrowed parts 2026-01-07T17:47:05.343432Z node 11 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2026-01-07T17:47:05.343491Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2026-01-07T17:47:05.343567Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:47:05.354253Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:47:05.354350Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:47:05.354388Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-07T17:47:05.364888Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.364981Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.365013Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.365045Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.365074Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.385958Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.386047Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.386089Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.386147Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.386184Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.396777Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.396858Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.396890Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.396944Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.396979Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.407577Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.407666Z node 11 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.407705Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.407745Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.407780Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.428880Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.428996Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.429036Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.429076Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.429114Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.439684Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.439778Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.439816Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.439857Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.439895Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.450388Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.450463Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.450494Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.450527Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.450556Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.471661Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.471752Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.471791Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.471833Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T17:47:05.471869Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.482383Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.482456Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.482487Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.482520Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.482558Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.493066Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:47:05.493148Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.493180Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:47:05.493213Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:47:05.493241Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-07T17:47:05.854617Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037907, clientId# [11:3258:4468], serverId# [11:3259:4469], sessionId# [0:0:0] { items { uint32_value: 6 } items { uint32_value: 600 } }, { items { uint32_value: 7 } items { uint32_value: 70 } }, { items { uint32_value: 8 } items { uint32_value: 80 } }, { items { uint32_value: 9 } items { uint32_value: 90 } } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2026-01-07T17:46:57.232910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677619096677033:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:57.233018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:57.497442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:57.497586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:57.533079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:57.545600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:57.590090Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:57.713953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:57.804063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:57.804097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:57.804109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:57.804240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:58.013058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:58.256465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:58.261892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:00.156204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677631981580001:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.156266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677631981579987:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.156505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.157825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677631981580002:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.157892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677631981580007:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.158017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:47:00.159479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:47:00.202748Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677631981580028:2641] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:47:00.213098Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677631981580006:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:47:00.213151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677631981580025:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:47:00.291349Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677631981580057:2667] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:00.308697Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677631981580075:2675] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:01.026486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:01.386492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:01.757266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:02.102164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:02.232921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677619096677033:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:02.232996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:02.599724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:04.675161Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677649673371608:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:04.675239Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:04.685921Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:04.760989Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:04.792450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:04.792519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:04.794108Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:04.849317Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:04.981677Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:04.981702Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:04.981709Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:04.981789Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:05.254656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 |92.7%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |92.8%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::Restart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:26.595863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:26.682878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:26.690551Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:26.690826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:26.690934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:27.019876Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.119055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:27.119222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:27.153862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:27.224418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:27.912194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:27.913482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:27.913536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:27.913583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:27.913822Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:27.972195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:28.519090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:31.088016Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:31.094295Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:31.097640Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:31.128122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:46:31.128243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.166801Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:31.168533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.357108Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:31.357207Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:31.358877Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.359560Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.360008Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.360913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.361151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.361495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.361617Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.361845Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.362093Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:31.383110Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:31.588696Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:31.657864Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:31.657958Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:31.695244Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:31.696479Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:31.696725Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:31.696782Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:31.696828Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:31.696873Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:31.696947Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:31.697003Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:31.697591Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:31.699706Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:46:31.702207Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:46:31.712277Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:46:31.712369Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:46:31.712457Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:46:31.719237Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:31.719336Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:31.742647Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:46:31.742961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:46:31.745423Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:31.775342Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:31.788143Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:31.788254Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:31.798377Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:31.955719Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:32.010823Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:46:32.361322Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:46:32.496114Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:32.496234Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:46:33.206391Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... StatRequests.size() = 1 2026-01-07T17:47:04.626440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:47:04.780691Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6149:5109], ActorId: [2:6150:5110], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDA1MDM0NGUtZjU0NTFkNDgtYWVmYjU2YWItYzM2MjI1NmM=, TxId: 2026-01-07T17:47:04.780781Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6149:5109], ActorId: [2:6150:5110], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDA1MDM0NGUtZjU0NTFkNDgtYWVmYjU2YWItYzM2MjI1NmM=, TxId: 2026-01-07T17:47:04.781144Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6148:5108], ActorId: [2:6149:5109], Got response [2:6150:5110] SUCCESS 2026-01-07T17:47:04.781542Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:04.807141Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-07T17:47:04.807211Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-07T17:47:04.979089Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:47:04.979171Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:47:05.045191Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6161:5121], schemeshard count = 1 2026-01-07T17:47:06.058501Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:47:06.748511Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-07T17:47:06.748578Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.156000s, at schemeshard: 72075186224037899 2026-01-07T17:47:06.748870Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-07T17:47:06.762747Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:47:07.398337Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:47:07.398405Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 32] is column table. 2026-01-07T17:47:07.398439Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-07T17:47:07.403434Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:47:07.433272Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:47:07.433941Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:47:07.434031Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:47:07.435463Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-07T17:47:07.449776Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:47:07.450050Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:47:07.451201Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6278:5184], server id = [2:6282:5188], tablet id = 72075186224037905, status = OK 2026-01-07T17:47:07.451534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6278:5184], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-07T17:47:07.451894Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6279:5185], server id = [2:6283:5189], tablet id = 72075186224037906, status = OK 2026-01-07T17:47:07.451956Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6279:5185], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-07T17:47:07.452821Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6280:5186], server id = [2:6284:5190], tablet id = 72075186224037907, status = OK 2026-01-07T17:47:07.452878Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6280:5186], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-07T17:47:07.453518Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6281:5187], server id = [2:6285:5191], tablet id = 72075186224037908, status = OK 2026-01-07T17:47:07.453578Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6281:5187], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-07T17:47:07.457826Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037905 2026-01-07T17:47:07.458272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6278:5184], server id = [2:6282:5188], tablet id = 72075186224037905 2026-01-07T17:47:07.458305Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:07.458511Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037906 2026-01-07T17:47:07.458900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6279:5185], server id = [2:6283:5189], tablet id = 72075186224037906 2026-01-07T17:47:07.458921Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:07.459275Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037908 2026-01-07T17:47:07.459515Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6281:5187], server id = [2:6285:5191], tablet id = 72075186224037908 2026-01-07T17:47:07.459533Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:07.459668Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037907 2026-01-07T17:47:07.459696Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:47:07.459827Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-07T17:47:07.459918Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:47:07.460175Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6298:5200], ActorId: [2:6299:5201], Starting query actor #1 [2:6300:5202] 2026-01-07T17:47:07.460217Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6299:5201], ActorId: [2:6300:5202], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-07T17:47:07.461455Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6280:5186], server id = [2:6284:5190], tablet id = 72075186224037907 2026-01-07T17:47:07.461475Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:07.461921Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6299:5201], ActorId: [2:6300:5202], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Njk0N2Y5MWUtN2YyODJhMTQtZDE4N2Y2NzMtNmQ4ZGJkNzU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:47:07.545869Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6309:5211]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:07.546176Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:47:07.546227Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6309:5211], StatRequests.size() = 1 2026-01-07T17:47:07.680612Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6299:5201], ActorId: [2:6300:5202], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Njk0N2Y5MWUtN2YyODJhMTQtZDE4N2Y2NzMtNmQ4ZGJkNzU=, TxId: 2026-01-07T17:47:07.680700Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6299:5201], ActorId: [2:6300:5202], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njk0N2Y5MWUtN2YyODJhMTQtZDE4N2Y2NzMtNmQ4ZGJkNzU=, TxId: 2026-01-07T17:47:07.681144Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6298:5200], ActorId: [2:6299:5201], Got response [2:6300:5202] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:47:07.681657Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6322:5217]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:07.681868Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2026-01-07T17:47:07.681964Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:07.682521Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:07.682583Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:47:07.683219Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:07.683291Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-07T17:47:07.683351Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:47:07.689517Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 >> TSchemeShardSubDomainTest::DeleteAndRestart >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table |92.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:27.612358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:27.729887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:27.738725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:27.739167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:27.739325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:28.155803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:28.281045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:28.281213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:28.317210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:28.392896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:29.057224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:29.058369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:29.058419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:29.058470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:29.058698Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:29.125922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:29.684988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:32.414200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:32.419215Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:32.422128Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:32.450824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:46:32.450935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:32.492801Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:32.494985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:32.700448Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:32.700595Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:32.702179Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.702920Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.703528Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.704500Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.704629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.705078Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.705197Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.705391Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.705661Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:32.721196Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:32.934711Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:33.007662Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:33.007758Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:33.044989Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:33.046264Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:33.046482Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:33.046532Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:33.046569Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:33.046626Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:33.046669Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:33.046720Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:33.047342Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:33.049253Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:46:33.051665Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:46:33.061147Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:46:33.061209Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:46:33.061299Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:46:33.067970Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.068082Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.094170Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:46:33.094617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:46:33.097737Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:33.131634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:33.144535Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:33.144677Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:33.155412Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:33.322794Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:33.375275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:46:33.727311Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:46:33.857822Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:33.857936Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:46:34.576231Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... rd count = 1 2026-01-07T17:47:04.508342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-07T17:47:04.508396Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-07T17:47:04.508428Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:47:04.512698Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-07T17:47:04.541696Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-07T17:47:04.542476Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-07T17:47:04.542575Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-07T17:47:04.543810Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-07T17:47:04.557975Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:47:04.558250Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-07T17:47:04.559215Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5524:4747], server id = [2:5528:4751], tablet id = 72075186224037899, status = OK 2026-01-07T17:47:04.559608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5524:4747], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:04.560011Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5525:4748], server id = [2:5529:4752], tablet id = 72075186224037900, status = OK 2026-01-07T17:47:04.560066Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5525:4748], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:04.560795Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5526:4749], server id = [2:5530:4753], tablet id = 72075186224037901, status = OK 2026-01-07T17:47:04.560850Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5526:4749], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:04.561499Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5527:4750], server id = [2:5531:4754], tablet id = 72075186224037902, status = OK 2026-01-07T17:47:04.561549Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5527:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:04.566605Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-07T17:47:04.567163Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5524:4747], server id = [2:5528:4751], tablet id = 72075186224037899 2026-01-07T17:47:04.567208Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:04.568137Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:47:04.569099Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5525:4748], server id = [2:5529:4752], tablet id = 72075186224037900 2026-01-07T17:47:04.569129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:04.569371Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-07T17:47:04.569697Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5526:4749], server id = [2:5530:4753], tablet id = 72075186224037901 2026-01-07T17:47:04.569725Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:04.569942Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-07T17:47:04.569984Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:47:04.570256Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-07T17:47:04.570687Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5527:4750], server id = [2:5531:4754], tablet id = 72075186224037902 2026-01-07T17:47:04.570713Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:04.606502Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:47:04.606750Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-07T17:47:05.123722Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 2 is different from the current 3 2026-01-07T17:47:05.123828Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:47:07.817182Z node 2 :STATISTICS INFO: service_impl.cpp:418: Node 3 is unavailable 2026-01-07T17:47:07.817266Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:47:07.817512Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 3 is different from the current 0 2026-01-07T17:47:07.817539Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-07T17:47:07.817623Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:47:07.817669Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:47:07.818261Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-07T17:47:07.832208Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-07T17:47:07.832406Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2026-01-07T17:47:07.833059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5648:4808], server id = [2:5649:4809], tablet id = 72075186224037900, status = OK 2026-01-07T17:47:07.833149Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5648:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-07T17:47:07.834626Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-07T17:47:07.834692Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-07T17:47:07.834805Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-07T17:47:07.834931Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-07T17:47:07.835244Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5651:4811], ActorId: [2:5652:4812], Starting query actor #1 [2:5653:4813] 2026-01-07T17:47:07.835312Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5652:4812], ActorId: [2:5653:4813], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:47:07.837797Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5648:4808], server id = [2:5649:4809], tablet id = 72075186224037900 2026-01-07T17:47:07.837833Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-07T17:47:07.838460Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5652:4812], ActorId: [2:5653:4813], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTRmNDg0ZmUtN2JjZjExYWMtNGI4MjQ3NmYtNDczZWFhOWE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:47:07.922230Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5662:4822]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:07.922417Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:47:07.922453Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5662:4822], StatRequests.size() = 1 2026-01-07T17:47:08.024325Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5652:4812], ActorId: [2:5653:4813], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTRmNDg0ZmUtN2JjZjExYWMtNGI4MjQ3NmYtNDczZWFhOWE=, TxId: 2026-01-07T17:47:08.024380Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5652:4812], ActorId: [2:5653:4813], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTRmNDg0ZmUtN2JjZjExYWMtNGI4MjQ3NmYtNDczZWFhOWE=, TxId: 2026-01-07T17:47:08.024708Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5651:4811], ActorId: [2:5652:4812], Got response [2:5653:4813] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-07T17:47:08.025110Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5675:4828]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:08.025296Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:08.025654Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:08.025692Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:47:08.026203Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:47:08.026244Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:47:08.026299Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T17:47:08.030438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::Redefine [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.452113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.452162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.466140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.466392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.466543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.472086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.472393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.472979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:47:10.473201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.475234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.475390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.493711Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.622644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.622851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.623065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.623110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.623314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.623386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.625378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.625780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.625878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.625905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.627542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.629120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.629166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.629212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.629254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.632708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.634748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.634853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.636181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.637200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.637403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.639402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.639445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... G: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:10.823416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.823498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.823711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2026-01-07T17:47:10.824027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.824127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-07T17:47:10.824399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.824495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.824607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:10.824656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:10.824717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:10.824739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:10.824857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.824956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.825182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-07T17:47:10.825574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.825711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.826885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.827666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 
2026-01-07T17:47:10.832923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.834423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.834492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.834958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.835032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.835076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.836418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:466:2418] sender: [1:528:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.899543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.899748Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 180us result status StatusSuccess 2026-01-07T17:47:10.900043Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.900441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.900623Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 116us result status StatusSuccess 2026-01-07T17:47:10.900891Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.449606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.449658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.463351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.463627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.463783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.469884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.470243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.470832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.471032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.473380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.473527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-07T17:47:10.487237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.496544Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.614844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.615159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.616764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.616825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.617889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.617966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.622099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.622889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2026-01-07T17:47:10.623006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.623049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.625733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.627827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.628006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.631803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.633874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.634316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.636273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.637239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.637518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.642032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.642089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 2026-01-07T17:47:10.793946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2026-01-07T17:47:10.795480Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:47:10.796437Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:10.796637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:47:10.796867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2026-01-07T17:47:10.797887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:10.798109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:10.799333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:10.799536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-07T17:47:10.802733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:10.802801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:10.802959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:10.804257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:10.804312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:10.804385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.805166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:10.805219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:10.805940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-07T17:47:10.806958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:10.807015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:10.807114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:10.807137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:10.809868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:10.809918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:47:10.810003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:10.810028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:10.810107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-07T17:47:10.810216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:10.810250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:10.810452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:10.810552Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-07T17:47:10.810814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:10.810861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:10.810961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:10.810981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:10.811372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:10.811456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:10.811497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:605:2519] 2026-01-07T17:47:10.811701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:10.811770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:10.811799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:605:2519] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:10.812189Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.812441Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 256us result status StatusPathDoesNotExist 2026-01-07T17:47:10.812668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:10.813189Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.813394Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 188us result status StatusSuccess 2026-01-07T17:47:10.813841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:47:10.363243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.448975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.449043Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.463820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.464115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.464279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.470948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.471313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.472066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.472333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.475073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.475274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-07T17:47:10.487341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.494221Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.616055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.616273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.616737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.616797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.617906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.618000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.621025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.622889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.622996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.623024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.625284Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.626966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.627134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.631276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.633006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.634305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.636313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.637202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.637484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.639686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.639727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... : 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:10.833130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:47:10.833155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-07T17:47:10.833193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:10.833261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:47:10.834894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:47:10.834998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:10.835036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:10.835062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:10.836306Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:10.837183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.837479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-07T17:47:10.837749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:47:10.838205Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 
TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:10.838464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:47:10.839183Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:47:10.839391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:10.839578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2026-01-07T17:47:10.840771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:10.840950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:10.841878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:10.841934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:10.842058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:10.843036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:10.843078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:10.843139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.843798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:10.843866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:10.845035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:10.845071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:10.845131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: 
Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:10.845175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:10.846667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:10.846786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:47:10.847083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:47:10.847118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:47:10.847543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:47:10.847623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:47:10.847655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:582:2537] TestWaitNotification: OK eventTxId 104 2026-01-07T17:47:10.848247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.848403Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 168us result status StatusPathDoesNotExist 2026-01-07T17:47:10.848579Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:10.849176Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:10.849329Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 161us result status StatusSuccess 2026-01-07T17:47:10.849734Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.452647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.452721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.464621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.464844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.464992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.470002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.470243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.470769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.470962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.473139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.473345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.494344Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.613552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.615112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.616728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.616795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.617877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.617958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.620654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.622891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.623006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.623058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-07T17:47:10.625205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.627027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.627182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.631748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.633459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.634301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.636273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.637243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.637489Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.639353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.639412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:11.015625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-07T17:47:11.015645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-07T17:47:11.015720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:47:11.016097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 509 RawX2: 4294969757 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:11.016132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2026-01-07T17:47:11.016235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 509 RawX2: 4294969757 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:11.016282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:11.016350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 509 RawX2: 4294969757 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:11.016400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.016437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.016473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:47:11.016510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:47:11.021006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.021149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:11.021233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:11.021302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.021575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.021622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:11.021741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:11.021780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:11.021822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:11.021853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:11.021894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:47:11.021964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:315:2304] message: TxId: 102 2026-01-07T17:47:11.022022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:11.022067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:11.022098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:11.022247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-07T17:47:11.025449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.025499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:535:2479] TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:11.026025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.026256Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusSuccess 2026-01-07T17:47:11.026793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:11.027504Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.027719Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 221us result status StatusSuccess 2026-01-07T17:47:11.028192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.458849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.458904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.474194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.474490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.474680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.481295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.481636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.482384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.482633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.485622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.485782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.660840Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.661808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.661937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.662927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
oordinatorID: 72057594046316545 2026-01-07T17:47:11.260121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-07T17:47:11.260263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-07T17:47:11.260640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.260766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:11.260806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-07T17:47:11.261062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-07T17:47:11.261125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-07T17:47:11.261286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:11.261360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:11.261408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:11.263279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.263327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:11.263433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:11.263523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.263608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:11.263674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 FAKE_COORDINATOR: Erasing txId 100 2026-01-07T17:47:11.264048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.264098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:11.264202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:11.264235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.264268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:11.264295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.264329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:47:11.264367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.264410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:11.264449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:11.264520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:11.264575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-01-07T17:47:11.264614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:11.264655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:11.265426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.265507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.265555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:11.265594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:11.265641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:11.266312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.266384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.266412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:11.266441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:11.266484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:11.266565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-01-07T17:47:11.266612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:682:2671] 2026-01-07T17:47:11.270122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:11.270555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:11.270638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.270679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:683:2672] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2026-01-07T17:47:11.271153Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.271366Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusSuccess 2026-01-07T17:47:11.271808Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:47:10.364329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.455504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.455560Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.470268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.470481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.470602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.476335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.476663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.477379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.477583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.480099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.480232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.494582Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.638115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.638368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.638567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.638620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.638825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.638910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.641078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.641308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.641508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.641571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.641602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.641627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.643045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.643110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.643150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.644649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.644708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.644767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.644818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.647683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.649392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.649596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.650641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.650768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.650812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.651124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.651191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.651365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.651450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.653579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.653645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
alIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2026-01-07T17:47:11.235739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-07T17:47:11.236014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-07T17:47:11.239175Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:11.239516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.239821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:11.240774Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:11.241263Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:47:11.242131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:11.242347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2026-01-07T17:47:11.243390Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:11.243623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:11.243763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2026-01-07T17:47:11.245217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409549 2026-01-07T17:47:11.245746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:11.245891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-07T17:47:11.246497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:47:11.247623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:47:11.247713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.247794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:47:11.247851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:11.248082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.248146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:11.248271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:11.249080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:11.249129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:11.249190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:11.249213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:11.252054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:11.252086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:11.252145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:11.252161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:11.252397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:11.252429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:11.252490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:11.252636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.252682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:11.252778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:11.254147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:11.255553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:47:11.255817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:47:11.255859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:47:11.256257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:47:11.256339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.256374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:793:2696] TestWaitNotification: OK eventTxId 106 2026-01-07T17:47:11.256872Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.257040Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 178us result status StatusSuccess 2026-01-07T17:47:11.257360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.456246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.456309Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.470437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.470713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.470890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.476647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.476992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.477658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.477873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.479657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.479784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.628363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:10.631510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.631691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.631759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.631823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.631898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.631974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.632538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-07T17:47:11.236246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-07T17:47:11.236299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-07T17:47:11.236445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:11.236499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:11.236542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-01-07T17:47:11.238360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.238407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:11.238540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:11.238642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.238683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:11.238768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-07T17:47:11.239013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.239054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:11.239171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:11.239211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.239257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:11.239292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.239328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready 
parts: 1/1, is published: false 2026-01-07T17:47:11.239370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:11.239506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:11.239552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:11.239702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:11.239751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-07T17:47:11.239782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:11.239820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:11.240457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.240529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.240566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:11.240609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:11.240657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:11.241338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.241415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:11.241454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:11.241482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:11.241519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 2 2026-01-07T17:47:11.241592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-07T17:47:11.244149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:11.245027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-07T17:47:11.245282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:11.245331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:11.245404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:11.245423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:11.245752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:11.245874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.245911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:718:2707] 2026-01-07T17:47:11.246044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:11.246172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.246199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:718:2707] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:11.246578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.246752Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 178us result status StatusSuccess 2026-01-07T17:47:11.247242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.431084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.431145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.444296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.444494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.444798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.451701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.455617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.457551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.460866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.466833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.468843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.658495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.659967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.660553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
xt state 2026-01-07T17:47:11.459600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 2 -> 3 2026-01-07T17:47:11.461661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.461716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:11.461759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 3 -> 128 2026-01-07T17:47:11.463588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.463674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.463720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2026-01-07T17:47:11.463773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2026-01-07T17:47:11.463897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:11.465158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2026-01-07T17:47:11.465264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000043 2026-01-07T17:47:11.465501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.465595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:11.465632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-07T17:47:11.465837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-07T17:47:11.465897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-07T17:47:11.466032Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:11.466084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2026-01-07T17:47:11.467443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.467483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:11.467599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.467630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 108, path id: 38 2026-01-07T17:47:11.467854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.467889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-07T17:47:11.467964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:47:11.467993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:11.468021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:47:11.468047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:11.468083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-07T17:47:11.468115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:11.468142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-07T17:47:11.468167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-07T17:47:11.468221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:47:11.468253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2026-01-07T17:47:11.468319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 38], 8 2026-01-07T17:47:11.468696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:11.468777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:11.468806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:47:11.468833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-07T17:47:11.468863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:11.468948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-07T17:47:11.471218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-07T17:47:11.471486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-07T17:47:11.471526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-07T17:47:11.471954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-07T17:47:11.472026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.472083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:992:2947] TestWaitNotification: OK eventTxId 108 2026-01-07T17:47:11.472646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.472847Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 209us result status StatusSuccess 2026-01-07T17:47:11.473292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364327Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.455794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.455847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.470190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.470395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.470526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.475646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.475873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.476442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.476607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.478614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.478786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.493836Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.613544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-07T17:47:10.615120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.616748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.616815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.617870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.617958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.620637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.622580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.622866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.623035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.623082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.623119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.625186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.626890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.626940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.627073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-07T17:47:10.631698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.633563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.634311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.636248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.636420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.637202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.637385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.637496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.639187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.639222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:11.471987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:11.472013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:11.472969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:11.473931Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:11.474088Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:11.474198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.474502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-07T17:47:11.475577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:11.475750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2026-01-07T17:47:11.476395Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:47:11.476566Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:11.476639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:11.476768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 
2026-01-07T17:47:11.477544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409549 2026-01-07T17:47:11.478024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:11.478197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-07T17:47:11.478645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:11.478933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.478967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:47:11.479020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:11.479155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.479194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:11.479283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:11.490121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:11.490180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:11.490699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:11.490734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:11.490793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:11.490810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:11.493811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:11.493857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:11.493906Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath 
for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:11.494050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:11.494090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:11.494163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:11.494391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:11.495579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:47:11.495765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:11.495797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:11.496122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:11.496184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:11.496212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:544:2498] TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:11.509384Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.509607Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 257us result status StatusPathDoesNotExist 2026-01-07T17:47:11.509796Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:11.510448Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:11.510628Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 197us result status StatusPathDoesNotExist 2026-01-07T17:47:11.510761Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:11.289950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:11.290042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.290081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:11.290122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:11.290169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:11.290196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:11.290265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.290350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:11.291175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:11.291476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:11.378247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:11.378309Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:11.392615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:11.392958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:11.393129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:11.405906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:11.406211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:11.406875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.407104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:11.409613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.409800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:11.410875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.410922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.411029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:11.411068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:11.411181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:11.411352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.417678Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:11.542495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:11.542723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.542937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:11.542988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:11.543213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:11.543281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:11.545445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.545656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:11.545867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.545923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:11.545960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:11.545994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:11.548014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.548065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:11.548121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:11.549716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.549773Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.549829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:11.549880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:11.553687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:11.555481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:11.555668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:11.556644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.556796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:11.556843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:11.557116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:11.557169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:11.557348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:11.557437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:11.559224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.559283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2026-01-07T17:47:12.231302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:12.231391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:12.233411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:12.233789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2026-01-07T17:47:12.234262Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:274:2263] Bootstrap 2026-01-07T17:47:12.235322Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:274:2263] Become StateWork (SchemeCache [2:279:2268]) 2026-01-07T17:47:12.237975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:12.238207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.238304Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2026-01-07T17:47:12.239173Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:274:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:47:12.241070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.241247Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2026-01-07T17:47:12.241673Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2026-01-07T17:47:12.244344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:12.244553Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.244643Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2026-01-07T17:47:12.246505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.246694Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-07T17:47:12.246944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:12.246983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:12.247061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:12.247083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:12.247399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:12.247505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:12.247545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:293:2282] 2026-01-07T17:47:12.247665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:12.247732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:12.247758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:293:2282] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:12.248072Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.248223Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 156us result status StatusPathDoesNotExist 2026-01-07T17:47:12.248393Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:12.248764Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.248895Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 138us result status StatusPathDoesNotExist 2026-01-07T17:47:12.249032Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:12.249431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.249561Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 147us result status StatusSuccess 2026-01-07T17:47:12.249923Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364273Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.440096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.440150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.453661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.453901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.454046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.459411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.459716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.460312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.460878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.466856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.468846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.646580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.649934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.650023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.650084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.650178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.650243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
165710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:47:12.165735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:47:12.165761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:47:12.165781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:47:12.166468Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:12.166532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:12.166558Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:12.166587Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:47:12.166622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:12.167200Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:12.167275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:12.167297Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:12.167314Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:47:12.167332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:12.167401Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:47:12.168543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at 
schemeshard 72057594046678944 2026-01-07T17:47:12.168629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:12.168654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:12.168671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:12.170080Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:12.170222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:12.170370Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:12.170572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.170766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 Forgetting tablet 72075186233409546 2026-01-07T17:47:12.171664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:12.171806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409548 2026-01-07T17:47:12.173012Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2026-01-07T17:47:12.173388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:12.173502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:12.174097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:12.174150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:12.174237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:12.174448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:12.174477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:12.174524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:12.175188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:12.175549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:12.175605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:12.177355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:12.177385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:12.177422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:12.177446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:12.177551Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:12.177606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:47:12.177801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:12.177830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:12.178097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:12.178156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:12.178186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:930:2885] TestWaitNotification: OK eventTxId 102 
2026-01-07T17:47:12.178542Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.178653Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 130us result status StatusPathDoesNotExist 2026-01-07T17:47:12.178780Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-07T17:47:10.363399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.454821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.454877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.469235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.469504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.469646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.475187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.475472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.476140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.476341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.478796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.478969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487658Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.652071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.652988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.653733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:12.592619Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.592768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:47:12.598015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 106, at schemeshard: 72057594046678944 2026-01-07T17:47:12.598239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2026-01-07T17:47:12.598774Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.598954Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 190us result status StatusSuccess 2026-01-07T17:47:12.599320Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.599861Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.600039Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 199us result status StatusSuccess 2026-01-07T17:47:12.600439Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.601224Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.601375Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 169us result status StatusSuccess 2026-01-07T17:47:12.601676Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.602331Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:12.602493Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 182us result status StatusSuccess 2026-01-07T17:47:12.602859Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 41 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 41 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:11.962003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:11.962091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.962126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:11.962167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:11.962216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:11.962241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:11.962288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.962356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:11.963146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:11.963404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:12.033579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:12.033631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:12.045154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:12.045431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:12.045623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:12.051215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:12.051520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:12.052145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.052356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:12.054694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:12.054851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:12.055805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:12.055858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:12.055992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:12.056037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:12.056129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:12.056284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.062798Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:12.163913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:12.164100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.164232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:12.164278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:12.164450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:12.164496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:12.165963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.166141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:12.166336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.166370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:12.166410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:12.166444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:12.167715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.167763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:12.167793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:12.169035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.169077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.169130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:12.169188Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:12.172230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:12.173611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:12.173764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:12.174557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.174656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.174686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:12.174886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:12.174973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:12.175088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:12.175159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:12.176627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:12.176669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
shard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 600 RawX2: 4294969836 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-07T17:47:12.750409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-01-07T17:47:12.750499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 600 RawX2: 4294969836 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-07T17:47:12.750543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:12.750590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 600 RawX2: 4294969836 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-07T17:47:12.750646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.750671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.750696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:47:12.750720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-07T17:47:12.753326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:12.753403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-07T17:47:12.753460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:12.755240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:12.755342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-07T17:47:12.755442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-07T17:47:12.755479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:2 ProgressState 2026-01-07T17:47:12.755586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation 
is done id#107:2 progress is 2/3 2026-01-07T17:47:12.755613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-07T17:47:12.755636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 2/3 2026-01-07T17:47:12.755673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-07T17:47:12.755697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2026-01-07T17:47:12.755926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:12.756031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.756166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.756337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.756368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-07T17:47:12.756421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 3/3 2026-01-07T17:47:12.756453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-07T17:47:12.756482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 3/3 2026-01-07T17:47:12.756501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-07T17:47:12.756533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2026-01-07T17:47:12.756587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:479:2428] message: TxId: 107 2026-01-07T17:47:12.756627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-07T17:47:12.756669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:47:12.756698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-07T17:47:12.756806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-07T17:47:12.756843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-07T17:47:12.756861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-07T17:47:12.756889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-07T17:47:12.756905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-07T17:47:12.756914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-07T17:47:12.756952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-07T17:47:12.758523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:47:12.758562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:535:2484] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-07T17:47:12.761247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:12.761722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2026-01-07T17:47:12.761811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-07T17:47:12.761855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-07T17:47:12.763410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:12.763618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has 
reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-07T17:47:12.763988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-07T17:47:12.764025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-07T17:47:12.764641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-07T17:47:12.764784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:47:12.764829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:728:2647] TestWaitNotification: OK eventTxId 108 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:12.365826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:12.365906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:12.365942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:12.365975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:12.366034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:12.366063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:12.366121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:12.366236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:12.366988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:12.367245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:12.451781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:12.451838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:12.466217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:12.466477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:12.466626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:12.473170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:12.473492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:12.474194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:12.474412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:12.476900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:12.477109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:12.478266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:12.478322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:12.478451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:12.478499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:12.478593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:12.478760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:12.617092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:12.618579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.618956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:12.619024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:13.227256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:13.227362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:13.227406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:13.227451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-07T17:47:13.227711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:13.227757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:13.227875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:13.227912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:13.228005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:13.228039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:13.228073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:47:13.228114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:13.228150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:13.228184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:13.228249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:13.228316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-07T17:47:13.228359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:13.228384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:13.229147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:13.229242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:13.229284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:13.229321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:13.229360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:13.229987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:13.230053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:13.230077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:13.230113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:13.230143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:13.230200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-07T17:47:13.233558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:13.233956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-07T17:47:13.234192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:13.234232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:13.234307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:13.234325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 101 2026-01-07T17:47:13.234643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:13.234738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:13.234766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:720:2709] 2026-01-07T17:47:13.234896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:13.234924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:13.234950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:720:2709] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:13.235287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.235440Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 181us result status StatusSuccess 2026-01-07T17:47:13.235849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:13.236165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.236333Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 139us result status StatusPathDoesNotExist 2026-01-07T17:47:13.236462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::RestartAtInFly |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.452629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.452672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.466827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.467180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.467378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.474307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.474661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.475418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.475663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.478264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.478445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.653961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.654939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.655982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
de 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.128077Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 253us result status StatusSuccess 2026-01-07T17:47:13.128589Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:13.129292Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.129517Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 244us result status 
StatusSuccess 2026-01-07T17:47:13.129944Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:13.130532Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.130704Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 191us result status StatusSuccess 2026-01-07T17:47:13.131068Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 
CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 300 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:13.131563Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.131754Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 208us result status StatusSuccess 2026-01-07T17:47:13.132135Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 300 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:47:10.363314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.453270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.453325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.467167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.467440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.467595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.473540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.473851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.474543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.474744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.477073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.477228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.496302Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:10.624755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:10.625020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.625181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:10.625217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:10.625372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:10.625430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:10.627203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:10.627569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.627642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:10.627682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:10.627722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:10.630714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.630765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2026-01-07T17:47:10.630805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:10.632252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.632296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.632336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.632377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:10.635656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:10.637390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:10.637523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:10.638249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.638352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:10.638387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.638575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:10.638617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:10.638766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:10.638863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.640434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-07T17:47:10.640472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 10: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-07T17:47:13.394075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:13.394395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:13.394725Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 2026-01-07T17:47:13.395648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2026-01-07T17:47:13.395858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 2026-01-07T17:47:13.396142Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2026-01-07T17:47:13.397672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2026-01-07T17:47:13.397876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2026-01-07T17:47:13.398098Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409555 2026-01-07T17:47:13.399822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-07T17:47:13.399996Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 
2026-01-07T17:47:13.401694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:13.401900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2026-01-07T17:47:13.402371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-07T17:47:13.403154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2026-01-07T17:47:13.403350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2026-01-07T17:47:13.403712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:13.403759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2026-01-07T17:47:13.403828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:13.404091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:13.404140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:13.404259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:13.407721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:13.407777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:13.407866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-07T17:47:13.407889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2026-01-07T17:47:13.408483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:14 2026-01-07T17:47:13.408523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2026-01-07T17:47:13.408630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:13.408653Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:13.409208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:16 2026-01-07T17:47:13.409251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2026-01-07T17:47:13.409464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:13.409557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:13.409620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:13.409663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:13.409743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:13.411211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2026-01-07T17:47:13.412030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2026-01-07T17:47:13.412089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2026-01-07T17:47:13.413418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2026-01-07T17:47:13.413518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2026-01-07T17:47:13.413561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2157:3939] TestWaitNotification: OK eventTxId 139 2026-01-07T17:47:13.415130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:13.415332Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 239us result status StatusSuccess 2026-01-07T17:47:13.415758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true 
CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 4 MaxPaths: 5 MaxChildrenInDir: 4 MaxAclBytesSize: 25 MaxTableColumns: 3 MaxTableColumnNameLength: 10 MaxTableKeyColumns: 1 MaxTableIndices: 20 MaxShards: 6 MaxShardsInPath: 4 MaxConsistentCopyTargets: 1 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 20 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.354022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.354109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.354145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.354179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.354218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.354249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.354308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.354378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.355139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.355363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.419659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.419706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.431357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.431651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.431814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.437141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.437435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.437923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.438133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.440187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.440366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.441199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.441273Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.441378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.441410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.441480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.441632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.447184Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:14.550430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:14.550644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.550816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:14.550857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:14.551069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:14.551139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:14.553004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.553206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:14.553393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.553453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:14.553496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:14.553528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:14.555196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.555262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:14.555306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:14.556843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.556887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.556963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.557010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:14.560418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:14.562015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:14.562183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:14.563088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.563208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:14.563258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.563534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:14.563583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-07T17:47:14.563727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:14.563803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.565417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.565482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... CE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.703890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-07T17:47:14.704091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:14.704328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:14.704343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:14.704432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.704968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 
2026-01-07T17:47:14.705538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.705922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.706497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.710886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.711848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.711894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.712120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.712165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.712201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.712360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2026-01-07T17:47:14.774848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:14.774904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 
72057594046678944 is [1:460:2412] sender: [1:522:2058] recipient: [1:15:2062] 2026-01-07T17:47:14.775590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:14.775692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:14.775723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:520:2458] TestWaitNotification: OK eventTxId 100 2026-01-07T17:47:14.776148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:14.776330Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusSuccess 2026-01-07T17:47:14.776723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:14.777140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:14.777300Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" 
took 174us result status StatusSuccess 2026-01-07T17:47:14.777561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.552861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.552975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.553011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.553043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: 
using default configuration 2026-01-07T17:47:14.553090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.553129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.553180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.553251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.554035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.554329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.620640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.620683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.632380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.632591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.632694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.639392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.639742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.640574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.640859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.647556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.647750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.649169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.649247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.649402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.649454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.649557Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.649744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.656904Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:14.777662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:14.777875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.778060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:14.778107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:14.778335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:14.778405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:14.780375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.780601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:14.780831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.780883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:14.780922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:14.780978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:14.782804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.782856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2026-01-07T17:47:14.782908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:14.784531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.784574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.784638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.784687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:14.788239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:14.789815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:14.789976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:14.790739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.790833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:14.790865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.791084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:14.791136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.791278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:14.791350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.792893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, 
at schemeshard: 72057594046678944 2026-01-07T17:47:14.792943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... blishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:47:14.977192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.977230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2315], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:14.977279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2315], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-07T17:47:14.977603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.977645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:14.977726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:14.977767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:14.977802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:14.977834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:14.977866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:47:14.977902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:14.977933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:14.977971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:14.978123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:14.978157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-07T17:47:14.978186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-07T17:47:14.978212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-07T17:47:14.978784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:14.978871Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:14.978913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:14.978961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-07T17:47:14.979015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:14.979589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:14.979662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:14.979686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:14.979711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-07T17:47:14.979734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:14.979782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-07T17:47:14.982295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:14.983191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-07T17:47:14.983416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:14.983469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-07T17:47:14.983824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:14.983902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:14.983951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:480:2429] 
TestWaitNotification: OK eventTxId 100 2026-01-07T17:47:14.984414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:14.984629Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 191us result status StatusSuccess 2026-01-07T17:47:14.985054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:14.985480Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:14.985671Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 183us result status StatusSuccess 2026-01-07T17:47:14.986019Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.209188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.209283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.209340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.209379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.209428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.209453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.209496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.209562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.210306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.210590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.295366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.295411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.309630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.309950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.310113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.316054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.316388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.317035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.317267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.319506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.319669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.320740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.320806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.320952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.320999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.321096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.321287Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.327269Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:14.452920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:14.453159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.453327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:14.453380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:14.453587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:14.453651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:14.455505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.455719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:14.455926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.455989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:14.456027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:14.456058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:14.457716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.457779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:14.457819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 
128 2026-01-07T17:47:14.459256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.459297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.459354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.459399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:14.462944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:14.464438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:14.464569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:14.465400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.465499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:14.465534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.465762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:14.465800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:14.465924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:14.465989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.467414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.467448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... lt> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2026-01-07T17:47:14.973826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:654: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2026-01-07T17:47:14.973874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-07T17:47:14.973900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2026-01-07T17:47:14.989654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.989822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.037708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2026-01-07T17:47:15.037859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-07T17:47:15.037942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-07T17:47:15.038009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.038049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2026-01-07T17:47:15.038082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2026-01-07T17:47:15.038370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2026-01-07T17:47:15.038469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-07T17:47:15.038513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers 
early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-07T17:47:15.038544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.038583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-07T17:47:15.038735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:47:15.038879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:15.038942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:47:15.042614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.042747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.043103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:15.043139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:15.043272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2026-01-07T17:47:15.043472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.043509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2315], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-07T17:47:15.043544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2315], at schemeshard: 72057594046678944, txId: 104, path id: 3 2026-01-07T17:47:15.044147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.044189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:47:15.044276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:15.044322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.044364Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:15.044405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.044445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-07T17:47:15.044481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.044514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:47:15.044539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:47:15.044661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2026-01-07T17:47:15.044692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:47:15.044719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:47:15.044769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-07T17:47:15.045398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:15.045473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:15.045501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:47:15.045541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:47:15.045582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:15.046044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:15.046113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:15.046140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 
2026-01-07T17:47:15.046247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-07T17:47:15.046284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2026-01-07T17:47:15.046334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:47:15.050515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:47:15.050895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.257813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.257899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.257931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.257965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.258005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.258033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.258098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.258165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.258926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.259179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2026-01-07T17:47:14.346510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.346560Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.360521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.360802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.360962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.369237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.369587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.370165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.370392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.372756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.372916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.373919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.373965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.374064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.374106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.374208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.374359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.529748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.530733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:47:14.530867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.530955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.531767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
plete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:15.144606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:47:15.145857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:15.145906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:15.146035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:15.146123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.146166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:47:15.146246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:47:15.146300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.146339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:15.146467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:15.146514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:15.146556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:15.146596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:15.146631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:15.146671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:15.146707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:15.146737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:15.146816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:15.146851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:47:15.146881Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:15.146913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:15.147584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:15.147670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:15.147709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:15.147754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:15.147807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:15.148490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:15.148576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:15.148608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:15.148675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:15.148716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:15.148805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:47:15.151663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:15.152128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-07T17:47:15.155208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:15.155432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2026-01-07T17:47:15.155484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2026-01-07T17:47:15.155665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-07T17:47:15.155735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-07T17:47:15.158977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:15.159258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-07T17:47:15.159587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:15.159637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:15.159721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:15.159742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:15.160097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:15.160240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:15.160293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.160337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:716:2705] 2026-01-07T17:47:15.160532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.160562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:716:2705] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.446216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.446278Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.460112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.460425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.460625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.468672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.469054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.469781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.470059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.472781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.472978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.643337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.644956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.645016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.645069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... complete, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:15.219033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-07T17:47:15.219077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:15.219256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-07T17:47:15.219405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:15.219443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-07T17:47:15.219483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-07T17:47:15.219843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:15.219891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-07T17:47:15.220007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:15.220043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-07T17:47:15.220101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-07T17:47:15.221019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:15.221104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:15.221140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:15.221178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-01-07T17:47:15.221212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72075186233409546, LocalPathId: 1] was 6 2026-01-07T17:47:15.221920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:15.221997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:15.222022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:15.222048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-07T17:47:15.222076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-07T17:47:15.222149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:47:15.224785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:15.224840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-07T17:47:15.225184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-07T17:47:15.225371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:15.225407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.225441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:15.225470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.225501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:47:15.225555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:995:2935] message: TxId: 104 2026-01-07T17:47:15.225595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:15.225631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:47:15.225661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:47:15.225771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-07T17:47:15.229436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-07T17:47:15.229474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:15.230911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:15.231026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:15.231397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:15.231439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-07T17:47:15.231505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.231559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1352:3269] 2026-01-07T17:47:15.232255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-07T17:47:15.233305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-07T17:47:15.233491Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 209us result status StatusSuccess 2026-01-07T17:47:15.234000Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 
TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:15.240882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:15.240997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.241036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:15.241074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:15.241123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:15.241154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2026-01-07T17:47:15.241229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.241323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:15.242154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:15.242431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:15.325271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:15.325322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:15.338385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:15.338597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:15.338723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:15.344599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:15.344954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:15.345686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:15.345910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:15.348384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.348547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:15.349465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:15.349509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.349632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:15.349670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:15.349738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:15.349857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.354743Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:15.477398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:15.477662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.477865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:15.477915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:15.478179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:15.478258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:15.480393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:15.480626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:15.480949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.481017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:15.481056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:15.481121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:15.483075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.483270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:15.483315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:15.484941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2026-01-07T17:47:15.484989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.485046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:15.485100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:15.488708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:15.490604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:15.490794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:15.491934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:15.492070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:15.492120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:15.492399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:15.492463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:15.492647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:15.492731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:15.494623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:15.494684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:15.684096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:15.690925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:15.690982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:15.691064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-07T17:47:15.691197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-07T17:47:15.691261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:15.691282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:15.691899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:15.692334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:15.692390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:15.692471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:15.694238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:15.694272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:15.694347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:15.694370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:47:15.694463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-07T17:47:15.694518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:15.694536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:15.694571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:15.694589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:15.694681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:15.696071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-01-07T17:47:15.696306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:15.696357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:15.696481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:15.696503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:15.696547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:15.696563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:15.697070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:15.697231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.697271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:621:2534] 2026-01-07T17:47:15.697440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:15.697616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.697645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:621:2534] 2026-01-07T17:47:15.697759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:15.697845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:15.697894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:621:2534] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:15.698341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:15.698533Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusPathDoesNotExist 2026-01-07T17:47:15.698753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:15.699257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:15.699454Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 192us result status StatusPathDoesNotExist 2026-01-07T17:47:15.699598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:15.700139Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:15.700338Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 173us result status StatusSuccess 2026-01-07T17:47:15.700825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:15.934016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:15.934095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.934135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:15.934172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-07T17:47:15.934214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:15.934242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:15.934296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.934386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:15.935151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:15.935431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:16.018396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:16.018448Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:16.035515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:16.035842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:16.036013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:16.042468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:16.042821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:16.043494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.043724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.046777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.046963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:16.048098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.048152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.048277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:16.048325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:16.048423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:16.048611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.055344Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:16.139043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:16.139232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.139440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:16.139483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:16.139659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:16.139712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:16.141575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.141744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:16.141918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.141957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:16.141983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:16.142009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:16.143663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.143715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2026-01-07T17:47:16.143746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:16.145233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.145277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.145328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.145372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:16.147949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:16.149240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:16.149401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:16.150359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.150492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:16.150526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.150752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:16.150790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.150947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:16.151016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.152436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-07T17:47:16.152471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:16.353342Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 2026-01-07T17:47:16.353770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:16.354281Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2026-01-07T17:47:16.355137Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:16.355932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:47:16.356103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2026-01-07T17:47:16.357484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:16.357653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2026-01-07T17:47:16.358536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:16.358697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:16.359902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:16.359950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:16.360060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for 
pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:16.360394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:16.360432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:16.360538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:16.362348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:16.362396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:16.362563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:16.362591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:16.363351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:16.363393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:16.363566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:16.363589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:47:16.366029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:16.366078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:16.366191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:16.366235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:16.366312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:16.366441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2026-01-07T17:47:16.366785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:16.366823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-07T17:47:16.366910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: 
tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:16.366930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:16.367334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:16.367431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:16.367462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:662:2568] 2026-01-07T17:47:16.367591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:16.367647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:16.367696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:662:2568] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:16.368149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:16.368368Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 190us result status StatusPathDoesNotExist 2026-01-07T17:47:16.368520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:16.368955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:16.369121Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 184us result status StatusSuccess 2026-01-07T17:47:16.369537Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:16.194576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:16.194691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.194729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:16.194768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:16.194816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:16.194844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:16.194900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.195021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:16.195906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:16.196206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:16.275836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:16.275912Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:16.290896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:16.291232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:16.291415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:16.297877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:16.298224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:16.298986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.299241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.302031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.302252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:16.303453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.303512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.303635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:16.303710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:16.303811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:16.304015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.310735Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:16.442086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:16.442349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.442574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:16.442624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:16.442842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:16.442902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:16.445337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.445561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:16.445793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.445849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:16.445891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:16.445927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:16.448128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-07T17:47:16.448206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:16.448252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:16.450229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.450279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.450336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.450388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:16.453623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:16.455312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:16.455479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:16.456301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.456428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:16.456470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.456764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:16.456832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:16.457045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:16.457151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.459781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.459841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2026-01-07T17:47:16.789960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:16.790094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2026-01-07T17:47:16.791792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:16.791853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:16.791970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:16.793978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:16.794035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:16.794143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-07T17:47:16.794178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-07T17:47:16.794300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2026-01-07T17:47:16.794399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:16.794494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:16.794535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:16.794607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:16.797362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:16.797421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 
72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:16.797490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:16.797511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:16.797554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:16.797583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:47:16.797639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:16.797658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:16.797706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:16.797759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:16.799502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:16.799622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-07T17:47:16.799947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:16.799989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:16.800081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:16.800103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:16.800560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:16.800700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:16.800736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:718:2613] 2026-01-07T17:47:16.800834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:16.800978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:16.801004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:718:2613] 
TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:16.801518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:16.801716Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 210us result status StatusPathDoesNotExist 2026-01-07T17:47:16.801888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:16.802421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:16.802598Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 191us result status StatusPathDoesNotExist 2026-01-07T17:47:16.802729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:16.803190Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:16.803409Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 196us result status StatusSuccess 2026-01-07T17:47:16.803830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::Create >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DDC353A9500 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.431007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.431065Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.443387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.443598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.444795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.451541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.455611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.457567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.460873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.466804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.468815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.640749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.641614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.641740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.641805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.641869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.641959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.642577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 Leader for TabletID 72057594046678944 is [2:1018:2977] sender: [2:1077:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 2026-01-07T17:47:17.444342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 
TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 2026-01-07T17:47:17.486030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 0 2026-01-07T17:47:17.486216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:17.486282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-07T17:47:17.486357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-07T17:47:17.486440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC35326100, stats written 1 2026-01-07T17:47:17.486938Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:17.487171Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 260us result status StatusSuccess 2026-01-07T17:47:17.487782Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 
100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:16.786278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:16.786389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.786427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:16.786462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:16.786505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-07T17:47:16.786529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:16.786583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.786646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:16.787419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:16.787697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:16.878314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:16.878375Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:16.893536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:16.893836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:16.894003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:16.901281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:16.901626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:16.902401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.902718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.905719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.905908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:16.907122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.907179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.907310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:16.907357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:16.907450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:16.907636Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:17.032598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.033946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.034000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.034060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.034183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:17.034272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:17.550142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:17.550360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-07T17:47:17.550417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:17.550568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:17.550628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:17.550669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:17.552350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:17.552391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:17.552528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:17.552611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:17.552644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:47:17.552684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:47:17.553126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:17.553182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:17.553273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:17.553307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:17.553346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 
2026-01-07T17:47:17.553380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:17.553417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:17.553453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:17.553486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:17.553521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:17.553579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:17.553635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:47:17.553684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:17.553712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:17.554248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:17.554335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:17.554376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:17.554411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:17.554449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:17.555059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:17.555126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:17.555151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:17.555180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:17.555222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:17.555298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:47:17.557723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:17.558716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-07T17:47:17.561717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:17.561885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:17.562062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2026-01-07T17:47:17.563955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-07T17:47:17.564153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-07T17:47:17.564424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:17.564472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:17.564559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:17.564584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:17.564979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:17.565117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:17.565157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:722:2711] 2026-01-07T17:47:17.565304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:17.565371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:17.565396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:722:2711] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::DefaultStorageConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:17.962688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:17.962751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:17.962781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:17.962808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:17.962840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:17.962868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:17.962910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:17.962974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:17.963671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:17.963928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.024577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.024620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.035617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.035859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.035972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.042307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.042686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.043467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.043724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.046270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.046428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.047302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.047346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.047438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.047474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.047627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.047768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.053331Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:18.175553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:18.175782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.175983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:18.176026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:18.176249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:18.176314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:18.178484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.178699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2026-01-07T17:47:18.178937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.178991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:18.179058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:18.179111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:18.181137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.181195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:18.181233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:18.183146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.183191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.183246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.183296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:18.186792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:18.188568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:18.188759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:18.189805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.189955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:18.190000Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.190275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:18.190339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.190518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:18.190599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.192532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.192585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... gnore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1374 } } CommitVersion { Step: 140 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:47:18.536250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 619 RawX2: 4294969828 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:18.536312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2026-01-07T17:47:18.536459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 619 RawX2: 4294969828 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:18.536513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:18.536632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 619 RawX2: 4294969828 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:18.536696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.536751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema 
changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.536809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2026-01-07T17:47:18.536849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:47:18.540330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:18.540417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:18.540539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.540860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.541177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.541225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:18.541320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:18.541358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:18.541400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:18.541446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:18.541484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:47:18.541544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:280:2269] message: TxId: 101 2026-01-07T17:47:18.541594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:18.541635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:18.541683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:18.541789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-07T17:47:18.543481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:18.543520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:281:2270] TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:18.544009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:18.544221Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 234us result status StatusSuccess 2026-01-07T17:47:18.544718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:18.545368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:18.545601Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 226us result status StatusSuccess 2026-01-07T17:47:18.546049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:16.010749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:16.010831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.010857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:16.010884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:16.010930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:16.010951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:16.010987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.011062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:16.011689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:16.011917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:16.083358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:16.083410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:16.099932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:16.100283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:16.100484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:16.108091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:16.108516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:16.109367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.109642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.113990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.114188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:16.115345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.115407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.115541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:16.115594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:16.115693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:16.115884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.213271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.213998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:47:16.214608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.214655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... athId: 38] was 11 2026-01-07T17:47:18.542931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 3 2026-01-07T17:47:18.544737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 46, at schemeshard: 72057594046678944 2026-01-07T17:47:18.544986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2026-01-07T17:47:18.545215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.545259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:18.545425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 46] 2026-01-07T17:47:18.545496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.545532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1392:3254], at schemeshard: 72057594046678944, txId: 137, path id: 38 2026-01-07T17:47:18.545581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1392:3254], at schemeshard: 72057594046678944, txId: 137, path id: 46 2026-01-07T17:47:18.545646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.545690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2026-01-07T17:47:18.545890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-07T17:47:18.547093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-01-07T17:47:18.547192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-01-07T17:47:18.547226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-07T17:47:18.547288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 15 2026-01-07T17:47:18.547329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 12 2026-01-07T17:47:18.548447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-07T17:47:18.548513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-07T17:47:18.548538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-07T17:47:18.548590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 46], version: 1 2026-01-07T17:47:18.548626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 4 2026-01-07T17:47:18.548687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2026-01-07T17:47:18.550449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2026-01-07T17:47:18.550562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:18.550597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-07T17:47:18.550883Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 
TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-07T17:47:18.551053Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2026-01-07T17:47:18.551145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-07T17:47:18.551178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-07T17:47:18.551280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-07T17:47:18.551320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-07T17:47:18.551377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-07T17:47:18.551446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 137:0 2 -> 3 2026-01-07T17:47:18.552302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-07T17:47:18.554439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-07T17:47:18.555014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.555144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.555191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2026-01-07T17:47:18.555270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2026-01-07T17:47:18.555571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 137:0 ProgressState 
Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1338 RawX2: 4294970507 } TxBody: "\n\236\004\n\007Table11\020.\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020.:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 38 2026-01-07T17:47:18.558926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2026-01-07T17:47:18.559078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D1F55DAA900 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.680504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.680618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.680660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.680696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.680768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.680805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.680862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.680992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.681864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.682164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.750937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.750997Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.763777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.764021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.764174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.770589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.770882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.771472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.771647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.773712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.773881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.774722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.774766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.774863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.774901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-07T17:47:14.774973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.775097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.890658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.893732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.893848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.893890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.893949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:47:14.894145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.894551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... aitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 Leader for TabletID 72057594046678944 is [1:901:2849] sender: [1:972:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 2026-01-07T17:47:18.724071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 0 2026-01-07T17:47:18.724268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-07T17:47:18.724328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:18.724504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 2026-01-07T17:47:18.737416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 
TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 2026-01-07T17:47:18.748292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 1 2026-01-07T17:47:18.748439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:47:18.748491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:18.748704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D1F55EDE100, stats written 2 2026-01-07T17:47:18.749275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:18.751373Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 232us result status StatusSuccess 2026-01-07T17:47:18.751823Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 
RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.469196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.469263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.469291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.469320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.469352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.469372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.469412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.469466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.470012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.470196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.554227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.554281Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.569669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.570012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.570197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.576582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.576992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.577630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.577881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.580414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.580586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.581714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.581766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.581883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.581926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.582015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.582188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.719927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.720970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:14.721105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.721361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
lete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.851901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.852792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.852840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:18.853050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:47:18.853196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.853230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:47:18.853287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:47:18.853779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.853878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:47:18.853957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.853998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:47:18.854034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-07T17:47:18.854731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.854820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.854855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:18.854891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-07T17:47:18.854950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:18.855618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.855707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.855747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:18.855769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:47:18.855796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:47:18.855853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:47:18.858826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.858881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:18.859275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:47:18.859443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:18.859475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.859510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:18.859538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.859585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:47:18.859641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:810:2776] message: TxId: 103 2026-01-07T17:47:18.859687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.859714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:18.859741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:18.859833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:47:18.860371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.860404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:18.861059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:18.861145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:18.862428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.862483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-07T17:47:18.862899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:18.862952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1064:2996] 2026-01-07T17:47:18.863340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:18.864110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:18.864307Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 216us result status StatusSuccess 2026-01-07T17:47:18.864758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: 
"quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:47:10.364317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.455974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.456034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.470467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.470764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.470889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.476565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.476869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.477558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.477738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.480088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.480202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.487515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.632774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.633659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.633842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.633909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.633977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.634514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
46678944, LocalPathId: 39] 2026-01-07T17:47:18.511174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.511213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:47:18.511269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:47:18.511301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:47:18.511650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.511695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:47:18.511789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:18.511826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.511864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:18.511899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.511933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:47:18.511981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:18.512032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:18.512061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:18.512185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:47:18.512224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-07T17:47:18.512260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 9 2026-01-07T17:47:18.512289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-07T17:47:18.513246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.513341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.513388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:18.513432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:47:18.513477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:47:18.514032Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-07T17:47:18.514251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:18.514297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:47:18.514374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:18.514846Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-07T17:47:18.515515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:18.515877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.515963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:18.516012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:18.516058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 9 2026-01-07T17:47:18.516106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:47:18.516183Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:47:18.516693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:18.519304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:18.519404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:18.521242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:18.521346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:18.521428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:47:18.521876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:18.521931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:18.522385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:18.522483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:18.522521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1145:3047] TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:18.906588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:18.906829Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 264us result status StatusSuccess 2026-01-07T17:47:18.907276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.744666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.744751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.744780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.744810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:10.744854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.744883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.745029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.745131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.745897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.746214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.835954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.836009Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.854164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.854488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.854670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.865066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.865437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.866211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.866460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.869198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.869384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.870583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.870645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.870790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.870841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:10.870937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.871133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.877945Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:11.023990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:11.024253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.024455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:11.024503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:11.024758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:11.024830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:11.026895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.027122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:11.027340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.027405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:11.027447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:11.027489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:11.029564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.029628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:11.029698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:11.031403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.031450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.031506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2026-01-07T17:47:11.031558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:11.040517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:11.045478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:11.045658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:11.046679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.046830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:11.046878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:11.047135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:11.047195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:11.047355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:11.047449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:11.049262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.049313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 Leader for TabletID 72057594046678944 is [2:1098:3018] sender: [2:1173:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 2026-01-07T17:47:18.994352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.0977 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 2026-01-07T17:47:19.025390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 0 2026-01-07T17:47:19.025577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-07T17:47:19.025655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:19.025799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 
72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 2026-01-07T17:47:19.036319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 2026-01-07T17:47:19.057615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 2026-01-07T17:47:19.068622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 
rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 1 2026-01-07T17:47:19.068791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:47:19.068863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:19.068989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-07T17:47:19.069039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-07T17:47:19.069070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:19.069107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D129CCF0500, stats written 3 2026-01-07T17:47:19.069660Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.069849Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 228us result status StatusSuccess 2026-01-07T17:47:19.070364Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D4B8E297D00 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.020389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.020489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.020535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.020572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.020614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.020644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.020695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.020816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.022153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.022464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.113196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.113269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.128668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.129130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.129316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.136039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.136420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.137201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.137481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.140077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.140342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.141473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.141532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.141669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.141724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.141819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.142005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.287725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.288705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.288896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:14.289374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.289884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.243969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-01-07T17:47:19.244457Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.244574Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.244961Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245046Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245303Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245459Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245620Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245796Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.245876Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246379Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246694Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246929Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.246976Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.247188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:19.247406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-07T17:47:19.251528Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:19.254946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:19.254999Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:19.255962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:19.256004Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:19.256040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:19.256212Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 Leader for TabletID 72057594046678944 is [2:873:2832] sender: [2:928:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 2026-01-07T17:47:19.312832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 0 2026-01-07T17:47:19.312994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:19.313058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-07T17:47:19.313121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-07T17:47:19.313182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D4B8E2B9900, stats written 1 2026-01-07T17:47:19.313560Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.313744Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 196us result status StatusSuccess 2026-01-07T17:47:19.314371Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:18.368202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:18.368307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.368350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:18.368389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:18.368434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:18.368464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:18.368523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.368597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:18.369492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:18.369783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.454908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.454966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.470624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.470909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.471080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.477125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.477437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.478131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.478349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.480798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.480985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.482063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.482118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.482241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.482290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.482382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.482580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.488645Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:18.612048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:18.612318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.612516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:18.612562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:18.612804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:18.612872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:18.614881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.615096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:18.615298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.615368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:18.615406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:18.615440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:18.617331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.617387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:18.617443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:18.619011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.619055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.619109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.619177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:18.622620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:18.624319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:18.624488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:18.625564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.625717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:18.625765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.626017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:18.626084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:18.626246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:18.626343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.628149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.628199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:19.542781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:19.542818Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2026-01-07T17:47:19.542865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 8 2026-01-07T17:47:19.542934Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:47:19.544641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:19.545838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-01-07T17:47:19.546062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:19.546103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-07T17:47:19.546203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:19.546226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-07T17:47:19.546275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:19.546295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:19.546712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:19.546818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:19.546869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:922:2766] 2026-01-07T17:47:19.547072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:19.547144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:19.547192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:19.547218Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:922:2766] 2026-01-07T17:47:19.547306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:19.547350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:922:2766] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:19.547762Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.547936Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 209us result status StatusSuccess 2026-01-07T17:47:19.548399Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:19.548848Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.549017Z node 2 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 197us result status StatusSuccess 2026-01-07T17:47:19.549300Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:19.549680Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.549846Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 162us result status StatusSuccess 2026-01-07T17:47:19.550197Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D7DCD7AE500 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:15.210038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:15.210140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.210185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:15.210220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:15.210276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:15.210311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:15.210356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:15.210440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:15.211202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:15.211465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:15.302782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:15.302831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:15.317523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:15.317784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:15.317954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:15.324667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:15.325096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:15.325781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:15.326039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:15.328598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.328804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:15.329940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:15.330011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:15.330128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:15.330172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:15.330260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:15.330432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:15.478830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.479826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.479958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:15.480852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0 ... TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0
Leader for TabletID 72057594046678944 is [1:895:2843] sender: [1:968:2058] recipient: [1:15:2062]
TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0 ... 2026-01-07T17:47:19.324159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0
TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0 ... 2026-01-07T17:47:19.335080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 0 2026-01-07T17:47:19.335224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-07T17:47:19.335305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:19.335439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-07T17:47:19.335510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:47:19.335543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:19.335583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D7DCD8D4100, stats written 2 2026-01-07T17:47:19.336014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:19.336278Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 246us result status StatusSuccess 2026-01-07T17:47:19.336761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] >> TSchemeShardTest::DefaultStorageConfig [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DDF7CCAE500 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:13.098047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:13.098139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:13.098178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:13.098220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:13.098270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:13.098304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:13.098380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:13.098466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:13.099339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:13.099645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:13.191207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:13.191279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:13.207125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:13.207535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:13.207724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:13.214812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:13.215189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:13.215949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:13.216234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:13.219018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:13.219215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:13.220495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:13.220581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:13.220748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:13.220801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:13.220905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:13.221137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-07T17:47:13.364500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.365634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.365788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.365864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.365961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:13.366800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 ... TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0
Leader for TabletID 72057594046678944 is [2:974:2933] sender: [2:1033:2058] recipient: [2:15:2062]
TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 ... 2026-01-07T17:47:20.242828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0
TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 ... TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100,
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 2026-01-07T17:47:20.284360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 0 2026-01-07T17:47:20.284566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:20.284636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-07T17:47:20.284690Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-07T17:47:20.284742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DDF7CC53100, stats written 1 2026-01-07T17:47:20.285272Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.285539Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 305us result status StatusSuccess 2026-01-07T17:47:20.286026Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 152 LastUpdateTime: 152 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics 
{ ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:18.393957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:18.394043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.394082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:18.394124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:18.394163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:18.394188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:18.394236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.394304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:18.395067Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:18.395315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.475363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.475411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.488840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.489112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.489267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.494889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.495185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.495807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.496015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.498356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.498515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.499491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.499545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.499653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.499697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.499784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.499945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.631253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.634989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.635476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:20.423225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:20.423247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:20.423970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:20.424328Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:20.424488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.424723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:47:20.425668Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:47:20.426090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:20.428281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409546 2026-01-07T17:47:20.429585Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:47:20.429700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:20.429865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409548 2026-01-07T17:47:20.430853Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:20.430954Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:20.431094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 Forgetting tablet 72075186233409547 2026-01-07T17:47:20.433629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:20.433672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:47:20.433741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409549 2026-01-07T17:47:20.434484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:20.434533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:20.434644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:20.435431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:20.435508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:20.437489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:20.437541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:20.437612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:20.437632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:20.437700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:20.437725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:20.437763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:20.437791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:20.439825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:20.439923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:20.439972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:20.440047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:20.440237Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:20.442372Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:47:20.444493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:47:20.444537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:47:20.444991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-07T17:47:20.445077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:47:20.445113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1066:3017] TestWaitNotification: OK eventTxId 105 2026-01-07T17:47:20.445671Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.445871Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 226us result status StatusPathDoesNotExist 2026-01-07T17:47:20.446024Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-07T17:47:20.446548Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.446681Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 152us result status StatusPathDoesNotExist 2026-01-07T17:47:20.446790Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::Delete >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DF8017AF900 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:10.363140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:10.363247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:10.363389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-07T17:47:10.363445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:10.363477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:10.363550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:10.363635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:10.364606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:10.366528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:10.457812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:10.457873Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:10.472514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:10.472888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:10.473083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:10.479100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:10.479426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:10.480113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:10.480346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:10.482953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.483107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:10.487173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:10.487380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:10.487430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-07T17:47:10.487521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:10.487707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:10.645774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.646991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:10.647054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:10.647413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 Leader for TabletID 72057594046678944 is [2:1019:2978] sender: [2:1078:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 2026-01-07T17:47:20.424716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 
TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 2026-01-07T17:47:20.466361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 0 2026-01-07T17:47:20.466569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:20.466652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-07T17:47:20.466735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-07T17:47:20.466796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8017CD900, stats written 1 2026-01-07T17:47:20.467335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.467592Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 302us result status StatusSuccess 2026-01-07T17:47:20.468183Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DC13D3AF900 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:16.607960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:16.608057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.608093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:16.608126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:16.608164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:16.608199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:16.608254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:16.608316Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:16.609116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:16.609366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:16.695189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:16.695244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:16.709707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:16.710015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:16.710167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:16.715808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:16.716099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:16.716697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:16.716948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:16.719130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.719289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:16.720286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:16.720332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:16.720443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:16.720482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:16.720562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:16.720721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:16.868813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.872947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:16.873345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
aitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 Leader for TabletID 72057594046678944 is [1:895:2843] sender: [1:968:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 2026-01-07T17:47:20.624312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 0 2026-01-07T17:47:20.624459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-07T17:47:20.624521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:20.624619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 2026-01-07T17:47:20.634990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 2026-01-07T17:47:20.645731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 1 2026-01-07T17:47:20.645832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:47:20.645877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:20.645947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DC13D41B100, stats written 2 2026-01-07T17:47:20.646278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.646471Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 197us result status StatusSuccess 2026-01-07T17:47:20.646868Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:20.655734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:20.655812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:20.655848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:20.655881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:20.655923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:20.655954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:20.655995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:20.656066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:20.656826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:20.657123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:20.737570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:20.737631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:20.753555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:20.753915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:20.754103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:20.760410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:20.760772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:20.761564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.761850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:20.764702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:20.764919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:20.766147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:20.766208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:20.766352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:20.766402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:20.766509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:20.766752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.773368Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:20.856621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:20.856828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.856992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:20.857033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:20.857226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:20.857271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:20.859052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.859220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:20.859406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.859447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:20.859472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:20.859494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:20.860997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.861033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:20.861062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:20.862255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.862289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.862332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.862386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:20.864755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:20.866069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:20.866202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:20.866951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.867035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:20.867062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.867272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:20.867309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.867426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:20.867487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:20.868869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:20.868906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
4 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-07T17:47:21.007749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2026-01-07T17:47:21.008085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:21.008184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:21.008227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:21.008541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-07T17:47:21.008586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:21.008733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:21.008805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-07T17:47:21.008852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:47:21.010627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:21.010673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:21.010835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:47:21.010935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:21.010972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:47:21.011013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-01-07T17:47:21.011070Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.011109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:21.011200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:21.011230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:21.011290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:21.011337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:21.011376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:21.011417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:21.011453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:21.011487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:21.011766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2026-01-07T17:47:21.011817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-01-07T17:47:21.011875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-07T17:47:21.011908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-07T17:47:21.012829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:21.012969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:21.013014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:21.013049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-07T17:47:21.013088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:21.014491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:21.014613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:21.014654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:21.014683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-07T17:47:21.014712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-07T17:47:21.014855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-07T17:47:21.014900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:561:2477] 2026-01-07T17:47:21.017159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:21.018038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:21.018131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:21.018161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:562:2478] TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:21.018831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:21.019036Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 213us result status StatusSuccess 2026-01-07T17:47:21.019543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 
CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D83862A6D00 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:18.044528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:18.044612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.044649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:18.044680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:18.044722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:18.044750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:18.044817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.044891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:18.045704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:18.045981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.132695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.132753Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.147550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.147879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.148057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.155541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.155893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.156647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.156918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.159755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.159950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.161109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.161170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.161298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.161346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.161450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.161650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.260256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:47:18.260989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.261747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats 
written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 Leader for TabletID 72057594046678944 is [1:966:2925] sender: [1:1025:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 2026-01-07T17:47:20.797091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 2026-01-07T17:47:20.838703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 0 2026-01-07T17:47:20.838852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:20.838950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-07T17:47:20.839001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-07T17:47:20.839050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D838644FD00, stats written 1 2026-01-07T17:47:20.839562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:20.839793Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 251us result status StatusSuccess 2026-01-07T17:47:20.840229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 148 LastUpdateTime: 148 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics 
{ ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::LS >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:42:31.673837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:31.763661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:31.771491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:42:31.771988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:42:31.772164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:42:32.134324Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:32.237356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:32.237508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:32.273395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:32.347824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:42:33.069907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:33.071163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:42:33.071214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:42:33.071268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:42:33.071493Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:42:33.133039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:42:33.678459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:36.900367Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:42:36.906491Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:42:36.909609Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:42:36.944907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:42:36.945127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:36.998222Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:42:37.005535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:37.275162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:42:37.275318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:42:37.277055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.277836Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.278438Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.279536Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.279667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.280060Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.280184Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.280311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.280518Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:42:37.299074Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:42:37.534704Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:37.602380Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:42:37.602495Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:42:37.651336Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:42:37.652808Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:42:37.653117Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:42:37.653204Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:42:37.653281Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:42:37.653335Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:42:37.653394Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:42:37.653446Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:42:37.654176Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:42:37.656408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:42:37.659125Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:42:37.670372Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:42:37.670497Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:42:37.670595Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:42:37.678892Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:37.679094Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:42:37.706500Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:42:37.706930Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:42:37.710046Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:42:37.755999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:42:37.778215Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:42:37.778393Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:42:37.799737Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:42:38.066808Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:42:38.151653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:42:38.641236Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:42:38.792673Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:42:38.792774Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:42:39.561751Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... -07T17:45:06.540544Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4674:4136], ActorId: [2:4675:4137], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODllNjQxM2MtODA3ZWU0ZS0zNjVhYTQzZS01MThkODdlZA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:45:06.622712Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4688:4149]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:45:06.622965Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:45:06.623010Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4688:4149], StatRequests.size() = 1 2026-01-07T17:45:06.797834Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4674:4136], ActorId: [2:4675:4137], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODllNjQxM2MtODA3ZWU0ZS0zNjVhYTQzZS01MThkODdlZA==, TxId: 2026-01-07T17:45:06.797939Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4674:4136], ActorId: [2:4675:4137], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODllNjQxM2MtODA3ZWU0ZS0zNjVhYTQzZS01MThkODdlZA==, TxId: 2026-01-07T17:45:06.798347Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4670:4134], ActorId: [2:4674:4136], Got response [2:4675:4137] SUCCESS 2026-01-07T17:45:06.798762Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:45:06.830097Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:45:06.830192Z node 2 :STATISTICS 
DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3408:3501] 2026-01-07T17:45:06.888820Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:45:06.888921Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:45:06.944132Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4444:3927], schemeshard count = 1 2026-01-07T17:45:07.823683Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:45:07.832837Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:45:07.845547Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-07T17:45:07.845713Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], Start read next stream part 2026-01-07T17:45:07.898017Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4757:4190]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:45:07.898381Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:45:07.898438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4757:4190], StatRequests.size() = 1 2026-01-07T17:46:28.482083Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecrzy059ercbvegqatx4rtd", SessionId: ydb://session/3?node_id=2&id=Zjg3MTA4YjAtNWI5ZGY1ZjQtMTJhMmZmNS1hYWZlMDM3Yw==, Slow query, duration: 80.630296s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-07T17:46:28.483098Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:46:28.483269Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], Start read next stream part 2026-01-07T17:46:28.483447Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-07T17:46:28.483905Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:46:28.483962Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4740:4180], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTFkMTI0NTItMmY1NTYzOGQtNmYyMmVhMmItMjYzMWZhNWM=, TxId: 2026-01-07T17:46:28.484181Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:46:28.487954Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-07T17:46:28.488080Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], Start read next stream part 2026-01-07T17:46:28.556033Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4871:4292]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:46:28.556417Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-07T17:46:28.556471Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4871:4292], StatRequests.size() = 1 2026-01-07T17:47:20.045384Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs2cr8086gnhx2b91s6ke5", SessionId: ydb://session/3?node_id=2&id=YzE5MzdmYzEtODhmNDc2NWItYTI0NTMzNzItZTFlNzk3Mg==, Slow query, duration: 51.553360s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-07T17:47:20.047976Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:47:20.048118Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], Start read next stream part 2026-01-07T17:47:20.048545Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-07T17:47:20.049008Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:47:20.049067Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4730:2463], ActorId: [2:4854:4278], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDRlNTdiZmQtMmQwNzc4MDYtNDkwMjVlM2QtZTQzZmJjZg==, TxId: 2026-01-07T17:47:20.050117Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4961:4376], ActorId: [2:4965:4379], Starting query actor #1 [2:4966:4380] 2026-01-07T17:47:20.050185Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4965:4379], ActorId: [2:4966:4380], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:47:20.055155Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4965:4379], ActorId: [2:4966:4380], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODVkZjJlNy04ZDYwMDljMC1mZjcwNjMyNC03NDQ1OWQ5Yg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:47:20.098145Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4965:4379], ActorId: [2:4966:4380], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODVkZjJlNy04ZDYwMDljMC1mZjcwNjMyNC03NDQ1OWQ5Yg==, TxId: 2026-01-07T17:47:20.098223Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4965:4379], ActorId: [2:4966:4380], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODVkZjJlNy04ZDYwMDljMC1mZjcwNjMyNC03NDQ1OWQ5Yg==, TxId: 2026-01-07T17:47:20.098586Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4961:4376], ActorId: [2:4965:4379], Got response [2:4966:4380] SUCCESS 2026-01-07T17:47:20.098893Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:20.136727Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-07T17:47:20.136821Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3408:3501] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:18.676343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:18.676461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.676506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:18.676545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:18.676593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:18.676622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:18.676677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.676746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:18.677608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:18.677907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.766121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.766180Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.782354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.782720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.782948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.790225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.790632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.791436Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.791702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.794724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.794941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.796113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.796172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.796295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.796344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.796452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.796632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.907267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:47:18.908270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.908812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 17:47:22.026620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.026955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.027000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:22.027171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:47:22.027318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.027355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:47:22.027398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:47:22.027464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.027513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:47:22.027597Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.027657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:47:22.027699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-07T17:47:22.029440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:22.029536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:22.029577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:22.029614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-07T17:47:22.029668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:22.030973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:22.031076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:22.031106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:22.031135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:47:22.031161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:47:22.031227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:47:22.033560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.033617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:22.033993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:47:22.034181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:22.034227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:22.034262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:22.034292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:22.034328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:47:22.034384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:810:2776] message: TxId: 103 2026-01-07T17:47:22.034423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:22.034478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:22.034511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:22.034597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:47:22.034983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.035018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:22.035522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:22.036632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:22.037896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.037951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-07T17:47:22.038029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.038066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1136:3069] 2026-01-07T17:47:22.038829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 
2026-01-07T17:47:22.040268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.040471Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 230us result status StatusSuccess 2026-01-07T17:47:22.040960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] >> TSchemeShardTest::PreserveColumnOrder >> 
TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:21.544027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:21.544107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.544139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:21.544172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:21.544218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:21.544241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:21.544292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.544374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:21.545171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:21.545437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:21.618809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:21.618853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:21.629061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:21.629264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:21.629393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:21.635047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:21.635352Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:21.635989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:21.636235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:21.638657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:21.638838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:21.639891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:21.639954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:21.640086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:21.640132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:21.640220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:21.640380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.646576Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:21.750901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:21.751081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.751221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:21.751249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:21.751403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:21.751444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:21.753576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:21.753780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:21.753991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.754042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:21.754090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:21.754127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:21.755971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.756024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:21.756072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:21.757661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.757709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:21.757760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:21.757824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:21.761367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:21.762935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:21.763112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:21.764081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:21.764208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:21.764250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:21.764500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:21.764556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:21.764734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:21.764871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:21.766586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:21.766641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
2026-01-07T17:47:22.704205Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.704233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:47:22.704266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 3 2026-01-07T17:47:22.704525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.704562Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:22.704678Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:22.704719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:22.704755Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:22.704783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:22.704834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:47:22.704872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:22.704910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:22.704977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:22.705158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2026-01-07T17:47:22.705200Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:47:22.705233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:47:22.705270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2026-01-07T17:47:22.705855Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:22.705929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:22.705964Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:22.706000Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:47:22.706052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:22.706657Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:22.706727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:22.706756Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:22.706781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-07T17:47:22.706805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2026-01-07T17:47:22.706868Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:47:22.710263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:22.710604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:47:22.710858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:47:22.710948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:47:22.711316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:47:22.711396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.711428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:961:2794] TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:22.711862Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-07T17:47:22.712020Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 179us result status StatusSuccess 2026-01-07T17:47:22.712380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.712837Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.713013Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 207us result status StatusSuccess 2026-01-07T17:47:22.713344Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 
CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:20.716529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:20.716628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:20.716667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:20.716704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:20.716746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:20.716775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:20.716841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:20.716950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:47:20.717792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:20.718110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:20.805912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:20.805968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:20.820002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:20.820268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:20.820421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:20.826647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:20.827157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:20.827885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.828168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:20.831114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:20.831286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:20.832456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:20.832537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:20.832619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:20.832700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:20.832768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:20.833090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.839735Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:20.972630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:20.972914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.973168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:20.973222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:20.973474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:20.973545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:20.976072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.976308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:20.976545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.976600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:20.976648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:20.976685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:20.978955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.979023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:20.979083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:20.980906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.980985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:20.981047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.981107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:20.984247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:20.985724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:20.985860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:20.986604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:20.986726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:20.986768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.986997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:20.987035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:20.987161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:20.987238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:20.988777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:20.988833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.515746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.515790Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2026-01-07T17:47:22.515956Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 129 2026-01-07T17:47:22.516103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:22.516150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2026-01-07T17:47:22.518122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.518171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.518371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2026-01-07T17:47:22.518533Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.518583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:341:2316], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-07T17:47:22.518633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:341:2316], at schemeshard: 72057594046678944, txId: 108, path id: 5 2026-01-07T17:47:22.519050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.519106Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:47:22.519166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2026-01-07T17:47:22.519870Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:22.519944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:22.519970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:47:22.520000Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:22.520048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:22.520819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:22.520880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:47:22.520900Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:47:22.520919Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2026-01-07T17:47:22.520982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2026-01-07T17:47:22.521031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-07T17:47:22.522877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2026-01-07T17:47:22.523950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-07T17:47:22.524265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2026-01-07T17:47:22.536257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-07T17:47:22.536304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2026-01-07T17:47:22.536426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-07T17:47:22.536477Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-07T17:47:22.538044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.538187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.538259Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-07T17:47:22.538348Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:47:22.538374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:22.538406Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:47:22.538433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:22.538467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-07T17:47:22.538524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:495:2444] message: TxId: 108 2026-01-07T17:47:22.538582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:47:22.538629Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-07T17:47:22.538651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-07T17:47:22.538743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-07T17:47:22.540304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.540346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:946:2856] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-07T17:47:22.543078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:22.543264Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.543550Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:22.545398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.545591Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-07T17:47:22.545921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-07T17:47:22.545970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-07T17:47:22.546397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-07T17:47:22.546459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.546486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:981:2891] TestWaitNotification: OK eventTxId 109 >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:22.391142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:22.391222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.391253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:22.391293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:22.391335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:22.391353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:22.391392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.391444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-07T17:47:22.392096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:22.392365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.454033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.454074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.464227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.464460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.464609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.470544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.470916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.471580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.471846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.474386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.474565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.475684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.475748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.475874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.475919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.476012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.476183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.482450Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:22.578152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:22.578343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.578478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:22.578517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:22.578695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:22.578740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:22.580575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.580793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:22.581052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.581119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:22.581162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:22.581197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:22.582860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.582909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:22.582945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:22.584268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.584301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.584343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.584375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:22.587098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:22.588527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:22.588690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:22.589672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.589791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.589832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.590065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:22.590114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.590250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.590318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.591734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.591775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
meshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:47:22.670633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.670667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:22.670701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-07T17:47:22.671031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.671069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:22.671167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:22.671201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:22.671244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:22.671278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:22.671316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:47:22.671349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:22.671382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:22.671410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:22.671591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:47:22.671639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-07T17:47:22.671669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-07T17:47:22.671696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-07T17:47:22.672575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:22.672665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 100 2026-01-07T17:47:22.672706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:22.672756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-07T17:47:22.672789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:22.673903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:22.673990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:22.674021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:22.674047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-07T17:47:22.674076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:22.674156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-07T17:47:22.676340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:22.677193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-07T17:47:22.677395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-07T17:47:22.677434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-07T17:47:22.677760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-07T17:47:22.677860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.677924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:456:2411] TestWaitNotification: OK eventTxId 100 2026-01-07T17:47:22.678366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.678549Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 188us result status StatusSuccess 2026-01-07T17:47:22.679030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.679646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.679813Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2026-01-07T17:47:22.680198Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true 
CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:21.904905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:21.905046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.905089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:21.905128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:21.905179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:21.905209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:21.905286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.905365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:21.906202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:21.906492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:21.991385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:21.991450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.006612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.006928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.007104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.013346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.013685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.014421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.014667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.017385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.017586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.018680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.018739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.018898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.018950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.019048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.019253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.025825Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:22.166538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:22.166773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.166978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:22.167029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:22.167250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:22.167314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:22.169815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.170033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:22.170248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.170314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:22.170384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:22.170419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:22.172475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.172536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:22.172581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:22.174564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.174617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.174674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.174730Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:22.178170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:22.179970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:22.180145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:22.181247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.181370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.181418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.181730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:22.181785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.181951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.182043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.184061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.184119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
EBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:47:22.627959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409548 2026-01-07T17:47:22.629092Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:47:22.629226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:22.629455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2026-01-07T17:47:22.631377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:47:22.631517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409549 2026-01-07T17:47:22.632029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:22.632155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:22.632414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:22.632587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:22.633711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:22.634399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:22.634564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.634608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.634703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:22.635833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:22.635873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-07T17:47:22.635981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-07T17:47:22.636001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-07T17:47:22.636058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:22.636075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:22.636112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:22.636200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.636229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.636285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.638603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:22.638640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-07T17:47:22.638712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:22.638733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-07T17:47:22.638778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:22.638792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:22.638824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:22.638847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:47:22.638931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:22.640093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:47:22.640398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:22.640445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:22.640897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:22.641026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.641073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:782:2674] TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:22.641632Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.641845Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 260us result status StatusPathDoesNotExist 2026-01-07T17:47:22.642040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:22.642556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.642790Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 197us result status StatusSuccess 2026-01-07T17:47:22.643247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:22.720948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:22.721059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.721102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:22.721142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:22.721184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:22.721212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:22.721262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.721333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:22.722150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:22.722448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.797520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.797580Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.811700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.812037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.812215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.818535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.818882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.819638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.819888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.823462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.823657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.824796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.824885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.825051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.825105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.825222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.825421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.832671Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:22.930396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:22.930622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.930797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:22.930834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:22.931012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:22.931061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:22.932763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.932964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:22.933134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.933176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:22.933205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:22.933230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:22.934672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.934716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:22.934751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:22.936059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.936092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.936137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.936179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:22.938684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:22.940023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:22.940161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:22.940978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.941100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.941150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.941413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:22.941459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.941592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.941676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.943484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.943524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
07T17:47:22.989611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:22.989631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:22.990578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:22.991234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:22.991339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.991368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:282:2271] 2026-01-07T17:47:22.991691Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 2026-01-07T17:47:22.991814Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-07T17:47:22.991899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-07T17:47:22.992161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-07T17:47:22.992350Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-07T17:47:22.992473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.992604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-07T17:47:22.992799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:47:22.992942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:22.993106Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 
ShardLocalIdx: 6 TxId_Deprecated: 0 2026-01-07T17:47:22.993186Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-07T17:47:22.993268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:47:22.993376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:22.993632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:22.993736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:22.994042Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-07T17:47:22.994198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:47:22.994334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:22.994609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.994662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.994780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:22.995545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.995593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.995655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.996221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:47:22.998437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:22.998562Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:47:22.998624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:47:22.998751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:23.000414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:47:23.000645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:23.000755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-07T17:47:23.001216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:23.001389Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 164us result status StatusPathDoesNotExist 2026-01-07T17:47:23.001578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:23.001983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:23.002203Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2026-01-07T17:47:23.002689Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:21.924039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:21.924129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.924160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:21.924195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:21.924237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:21.924265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:21.924326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.924416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:21.925212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:21.925531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.001553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.001607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.015298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.015561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.015702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.021384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.021646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.022212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.022448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.025181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.025373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.026522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.026584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.026709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.026776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.026876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.027064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.032867Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:47:22.121350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:22.121560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.121706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:47:22.121737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:22.121904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:22.121951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:22.124041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.124254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:22.124502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.124557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:22.124591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:22.124621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:47:22.126617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.126671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:22.126725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:22.128510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.128568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.128623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.128672Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:22.132303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:22.134307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:22.134469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:47:22.135530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.135668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.135717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.135986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:22.136037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:22.136197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.136294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.138310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.138359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
101, path id: 1 2026-01-07T17:47:22.655634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-01-07T17:47:22.655905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.655946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-07T17:47:22.656030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:22.656063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:22.656100Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:22.656150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:22.656193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:22.656226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:22.656256Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:22.656284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:22.656339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:22.656390Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:47:22.656417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:47:22.656448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-07T17:47:22.656828Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:22.656886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:22.656913Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:22.656963Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 
2026-01-07T17:47:22.656999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:22.657306Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:22.657361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:22.657377Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:22.657405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-07T17:47:22.657430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:22.657477Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:47:22.657754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.657789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.657853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-07T17:47:22.658123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:22.658158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:22.658209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:22.660082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:22.660362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:22.661237Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:22.661316Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:47:22.661513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:22.661552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:22.661808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:22.661891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.661926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:344:2333] TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:22.662265Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.662402Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 163us result status StatusPathDoesNotExist 2026-01-07T17:47:22.662512Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:22.662802Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:22.662905Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 111us result status StatusSuccess 2026-01-07T17:47:22.663158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:59.191392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:59.191498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:59.191536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:59.191572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:59.191607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:59.191652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:59.191740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:59.191809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:59.192524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:59.192809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:59.272216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:59.272256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:59.284327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:59.284712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:59.284908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:59.290063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:59.290297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:59.290823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:59.291052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:59.293773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:59.293979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:59.295057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:59.295119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:59.295221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:59.295261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:59.295301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:59.295473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:59.439747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:46:59.441637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.441762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.441851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.441926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:59.442640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
status: StatusAccepted 2026-01-07T17:47:22.690740Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.690814Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2026-01-07T17:47:22.690905Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:47:22.691066Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:22.694103Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-07T17:47:22.694272Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:825:2795], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:47:22.694630Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2026-01-07T17:47:22.694753Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:47:22.694924Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2026-01-07T17:47:22.694963Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2026-01-07T17:47:22.695007Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000043 2026-01-07T17:47:22.695247Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.695353Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 73014446191 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:22.695419Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000043 2026-01-07T17:47:22.695497Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-07T17:47:22.697703Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.697756Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-07T17:47:22.697875Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:47:22.697916Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:47:22.697966Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:47:22.698002Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:47:22.698055Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:47:22.698132Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:128:2153] message: TxId: 281474976710760 2026-01-07T17:47:22.698191Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:47:22.698240Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:47:22.698279Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:47:22.698359Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-07T17:47:22.700443Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:47:22.700509Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:47:22.700577Z node 17 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : 
TEvNotifyTxCompletionResult, id# 103, txId# 281474976710760 2026-01-07T17:47:22.700714Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:825:2795], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:47:22.702713Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-07T17:47:22.702860Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:825:2795], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:47:22.702926Z node 17 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:47:22.704655Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-07T17:47:22.704768Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, 
IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:825:2795], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:47:22.704829Z node 17 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-07T17:47:22.705017Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:22.705115Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:918:2877] TestWaitNotification: OK eventTxId 103 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:22.350768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:22.350847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.350874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:22.350902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:22.350951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:22.350971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:22.351009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.351067Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:22.351710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:22.351921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.413856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.413901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.424342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.424546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.424676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.429337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.429622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.430138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.430315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.432223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.432376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.433290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.433336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.433445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.433487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.433572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.433717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.533516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.537739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26-01-07T17:47:23.243822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-07T17:47:23.243876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-01-07T17:47:23.245628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:23.245669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:23.245845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:23.245975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:23.246022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-07T17:47:23.246071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-07T17:47:23.246378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-07T17:47:23.246425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-07T17:47:23.246522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:23.246569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:23.246612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-07T17:47:23.246656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:23.246722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-07T17:47:23.246760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-07T17:47:23.246798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-07T17:47:23.246831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-07T17:47:23.247051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-07T17:47:23.247116Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-01-07T17:47:23.247153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:23.247179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:23.247908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:23.248009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:23.248054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:23.248092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:23.248127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:23.248886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:23.249005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-07T17:47:23.249036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-07T17:47:23.249063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:23.249091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-07T17:47:23.249159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-01-07T17:47:23.249219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:961:2877] 2026-01-07T17:47:23.255589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:23.256651Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-07T17:47:23.256742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-07T17:47:23.256764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:962:2878] TestWaitNotification: OK eventTxId 100 2026-01-07T17:47:23.257154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:23.257374Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 197us result status StatusSuccess 2026-01-07T17:47:23.257833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-07T17:47:23.260140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:23.260270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, 
pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:23.260367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:47:23.262174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:23.262368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:19.526229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:19.526312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:19.526353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:19.526392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:19.526438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:19.526467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:19.526513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:19.526590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:19.527411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:19.527700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:19.616795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:19.616851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:19.631793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:19.632120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:19.632321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:19.638775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:19.639135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:19.639883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:19.640137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:19.642819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:19.643017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:19.644177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:19.644244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:19.644382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:19.644432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:19.644531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:19.644754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.756790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.757752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.757904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.757970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.758793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
MESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:23.353583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-07T17:47:23.353734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:23.353779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-07T17:47:23.353860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:23.353894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-07T17:47:23.353938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-07T17:47:23.354185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:23.354231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:23.354259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2807], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-07T17:47:23.354310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2807], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-07T17:47:23.355346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:23.355433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:23.355469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:23.355510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-01-07T17:47:23.355553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-07T17:47:23.356902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 
18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:23.356967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:23.356994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:23.357018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-07T17:47:23.357039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-07T17:47:23.357093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:47:23.358672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:23.358717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-07T17:47:23.359005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-07T17:47:23.359146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:23.359173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:23.359197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:23.359220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:23.359250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:47:23.359296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:993:2933] message: TxId: 104 2026-01-07T17:47:23.359323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:23.359350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:47:23.359376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:47:23.359447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-07T17:47:23.359881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 
2026-01-07T17:47:23.359916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:23.360666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:23.361544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:23.362487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:23.362532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2807], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-07T17:47:23.362592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:47:23.362626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1420:3338] 2026-01-07T17:47:23.363259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-07T17:47:23.364396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-07T17:47:23.364552Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 175us result status StatusSuccess 2026-01-07T17:47:23.365007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 30 
PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardTest::PreserveColumnOrder [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:22.662682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:22.662761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.662792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:22.662828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:22.662866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:22.662894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:22.662937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:47:22.663002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:22.663758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:22.663998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.749096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.749142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.762937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.763175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.763312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.768842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.769118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.769686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.769863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.772082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.772222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.773134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.773183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.773301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.773340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.773421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.773615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.921164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.922912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:24.314069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:24.314163Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 130 2026-01-07T17:47:24.314261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:24.314311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:24.314907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:24.315800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:47:24.317160Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:24.317192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:24.317310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:24.317416Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:24.317446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:47:24.317501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-07T17:47:24.317751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:24.317795Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-07T17:47:24.317842Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:24.317873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.317909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:24.317939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.317976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:24.318008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.318042Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:24.318073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:24.318134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:24.318171Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-07T17:47:24.318201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-07T17:47:24.318246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-07T17:47:24.318852Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:24.318920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:24.318952Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:24.318988Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-07T17:47:24.319025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:24.320121Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:24.320212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:24.320246Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:24.320276Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-07T17:47:24.320304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:24.320391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:47:24.320650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:24.320703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:24.320788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:24.321228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:24.321278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:24.321346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:24.323010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:24.324407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:24.324468Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:24.324564Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-07T17:47:24.324744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-07T17:47:24.324780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-07T17:47:24.325101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-07T17:47:24.325168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:24.325197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:753:2742] TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:24.325532Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:24.325656Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 139us result status StatusPathDoesNotExist 2026-01-07T17:47:24.325768Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> THealthCheckTest::TestStateStorageOk [GOOD] >> THealthCheckTest::TestStateStorageBlue >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:57.280881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:57.281368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:57.281436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:57.281478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:57.281512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:57.281539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:57.281606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:57.281673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:57.282580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:57.282867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:57.374428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:57.374492Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:57.389874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:57.390286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:57.390515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:57.398038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:57.398394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:57.399193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:57.399438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:57.402356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:57.402571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:57.403781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:57.403842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:57.403961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:57.404013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:57.404062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:57.404290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.411461Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: 
[1:15:2062] 2026-01-07T17:46:57.569175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:46:57.569426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.569634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:46:57.569711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:46:57.569952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:46:57.570026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:57.573287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:57.573497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:46:57.573712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.573782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:46:57.573830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:46:57.573868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:46:57.576866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.576918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:46:57.576972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:46:57.579208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.579271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.579361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:57.579462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:46:57.583199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:46:57.585374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:46:57.585592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:46:57.586834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:57.586998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:46:57.587062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:57.587379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:46:57.587444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:46:57.587611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:46:57.587689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:57.590086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:57.590141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
ecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1793 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-07T17:47:24.433321Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1793 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-07T17:47:24.434926Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 68719479434 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:24.434981Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-07T17:47:24.435129Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 68719479434 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:24.435205Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:24.435334Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 68719479434 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-07T17:47:24.435443Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:24.435497Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:24.435552Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:47:24.435600Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-07T17:47:24.436437Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:24.437833Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:24.437941Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 
2026-01-07T17:47:24.438053Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:24.438418Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:24.438468Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:24.438636Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:24.438691Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.438738Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:24.438778Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.438823Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-07T17:47:24.438892Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:741:2718] message: TxId: 101 2026-01-07T17:47:24.438951Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:24.439000Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:24.439044Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:24.439200Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:24.440629Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:24.440698Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [16:742:2719] TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:24.441253Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:24.441593Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 370us result status StatusSuccess 2026-01-07T17:47:24.442255Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "col01" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col02" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col03" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "col04" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "col05" Type: "Utf8" TypeId: 4608 Id: 5 NotNull: false IsBuildInProgress: false } Columns { Name: "col06" Type: "Utf8" TypeId: 4608 Id: 6 NotNull: false IsBuildInProgress: false } Columns { Name: "col07" Type: "Utf8" TypeId: 4608 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "col08" Type: "Utf8" TypeId: 4608 Id: 8 NotNull: false IsBuildInProgress: false } Columns { Name: "col09" Type: "Utf8" TypeId: 4608 Id: 9 NotNull: false IsBuildInProgress: false } Columns { Name: "col10" Type: "Utf8" TypeId: 4608 Id: 10 NotNull: false IsBuildInProgress: false } Columns { Name: "col11" Type: "Utf8" TypeId: 4608 Id: 11 NotNull: false IsBuildInProgress: false } Columns { Name: "col12" Type: "Utf8" TypeId: 4608 Id: 12 NotNull: false IsBuildInProgress: false } Columns { Name: "col13" Type: "Utf8" TypeId: 4608 Id: 13 NotNull: false IsBuildInProgress: false } Columns { Name: "col14" Type: "Utf8" TypeId: 4608 Id: 14 NotNull: false IsBuildInProgress: false } Columns { Name: "col15" Type: "Utf8" TypeId: 4608 Id: 15 NotNull: false IsBuildInProgress: false } Columns { Name: "col16" Type: "Utf8" TypeId: 4608 Id: 16 NotNull: false IsBuildInProgress: false } Columns { Name: "col17" Type: "Utf8" TypeId: 4608 Id: 17 NotNull: false IsBuildInProgress: false } Columns { Name: "col18" Type: "Utf8" TypeId: 4608 Id: 18 NotNull: false IsBuildInProgress: false } Columns { Name: "col19" Type: "Utf8" TypeId: 4608 Id: 19 NotNull: false IsBuildInProgress: false } Columns { Name: "col20" Type: "Utf8" TypeId: 4608 Id: 20 NotNull: false IsBuildInProgress: false } Columns { Name: "col21" Type: "Utf8" TypeId: 4608 Id: 21 NotNull: false IsBuildInProgress: false } Columns { Name: "col22" Type: "Utf8" TypeId: 4608 Id: 22 NotNull: false IsBuildInProgress: false } Columns { Name: "col23" Type: "Utf8" TypeId: 4608 Id: 23 NotNull: false IsBuildInProgress: false } Columns { Name: "col24" Type: "Utf8" TypeId: 4608 Id: 24 NotNull: false IsBuildInProgress: false } Columns { Name: "col25" Type: "Utf8" TypeId: 4608 Id: 25 NotNull: false IsBuildInProgress: false } Columns { Name: "col26" Type: "Utf8" TypeId: 4608 Id: 26 NotNull: false IsBuildInProgress: false } Columns { Name: "col27" Type: "Utf8" TypeId: 4608 Id: 27 NotNull: false IsBuildInProgress: false } Columns { Name: "col28" Type: "Utf8" TypeId: 4608 Id: 28 NotNull: false IsBuildInProgress: false } Columns { Name: "col29" Type: "Utf8" TypeId: 4608 Id: 29 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col01" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:22.660126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:22.660217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.660259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:22.660297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:22.660342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:22.660370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:22.660418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:22.660492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:47:22.661604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:22.661890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.746247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.746305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.760404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.760702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.760891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.767259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.767626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.768351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.768584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.771136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.771323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.772431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.772490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.772621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.772674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.772787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.772998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.912404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.915779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.915952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.916966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
{ TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-07T17:47:25.099164Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 240 -> 240 2026-01-07T17:47:25.100443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.100490Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-07T17:47:25.100572Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:47:25.100601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:47:25.100628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:47:25.100652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:47:25.100685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-07T17:47:25.100752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:649:2571] message: TxId: 106 2026-01-07T17:47:25.100788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:47:25.100833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-07T17:47:25.100864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-07T17:47:25.100987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-07T17:47:25.101026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-07T17:47:25.102292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:47:25.102335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:826:2723] TestWaitNotification: OK eventTxId 106 2026-01-07T17:47:25.102815Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.102977Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 176us result status StatusSuccess 2026-01-07T17:47:25.103291Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: 
true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:25.103901Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.104047Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 156us result status StatusSuccess 2026-01-07T17:47:25.104333Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:25.104734Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.104855Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 114us result status StatusSuccess 2026-01-07T17:47:25.105164Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:21.994642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:21.994748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.994785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:21.994825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:21.994878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:21.994921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:21.994983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:21.995067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:47:21.995928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:21.996240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:22.066822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:22.066889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:22.080045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:22.080419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:22.080603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:22.087232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:22.087925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:22.088652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:22.089045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:22.092657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.092878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:22.094169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:22.094251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:22.094337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:22.094384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:22.094444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:22.094737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:22.233044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.234930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.235004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.235098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.235199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:22.235269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-07T17:47:25.047920Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-07T17:47:25.047978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-07T17:47:25.048022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-07T17:47:25.049113Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:25.049194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:25.049229Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:47:25.049277Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:47:25.049321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:25.049870Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:25.049953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:47:25.049984Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:47:25.050012Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-07T17:47:25.050042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-07T17:47:25.050105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-07T17:47:25.052146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:47:25.052245Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:25.052288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:47:25.053179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:47:25.053918Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-07T17:47:25.054153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:25.054413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-07T17:47:25.054621Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2026-01-07T17:47:25.056432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:47:25.056659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-07T17:47:25.057176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:25.057232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:25.057359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2026-01-07T17:47:25.058312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:25.058365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-07T17:47:25.058437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:25.058706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-07T17:47:25.061837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-07T17:47:25.061960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:47:25.062278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:47:25.062374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-07T17:47:25.062756Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:47:25.062984Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:47:25.063305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:47:25.063347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:47:25.063806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:47:25.063906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:47:25.063949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:2103:3706] TestWaitNotification: OK eventTxId 104 2026-01-07T17:47:25.072895Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.073138Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 326us result status StatusPathDoesNotExist 2026-01-07T17:47:25.073284Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-07T17:47:25.073919Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.074075Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist 2026-01-07T17:47:25.074195Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:24.723385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:24.723475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:24.723512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:24.723546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:24.723589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:24.723628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:24.723684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-07T17:47:24.723754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:24.724531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:24.724795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:24.807135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:24.807193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:24.821309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:24.821601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:24.821798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:24.827767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:24.828094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:24.828758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:24.829048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:24.831510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:24.831694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:24.832762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:24.832842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:24.832985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:24.833028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:24.833122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:24.833325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:24.952570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.953980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.954067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.954142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.954227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:24.954297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
5Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2026-01-07T17:47:25.491776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 3 -> 128 2026-01-07T17:47:25.493001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.494711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.494810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.494881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.494916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.494946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:25.494986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-07T17:47:25.495119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:25.496178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-07T17:47:25.496263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000039 2026-01-07T17:47:25.496495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:25.496574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:25.496610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:25.496769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 
240 2026-01-07T17:47:25.496805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-07T17:47:25.496997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:25.497061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:25.498660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:25.498705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:25.498906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:25.498954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:47:25.499385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.499431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-07T17:47:25.499521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:25.499550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:25.499588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-07T17:47:25.499628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:25.499668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-07T17:47:25.499706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-07T17:47:25.499741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-07T17:47:25.499785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-07T17:47:25.499957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:47:25.500000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2026-01-07T17:47:25.500029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-07T17:47:25.500327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:25.500387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:47:25.500444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:47:25.500469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-07T17:47:25.500499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:25.500553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-07T17:47:25.500590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:715:2704] 2026-01-07T17:47:25.502691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-07T17:47:25.502798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-07T17:47:25.502826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:722:2711] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-07T17:47:25.503178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.503344Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 166us result status StatusSuccess 2026-01-07T17:47:25.503617Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.062026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.062138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.062176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.062208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.062249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.062275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.062321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.062383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.063150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.063429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.145658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.145709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.160210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.160522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.160720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.168993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.169324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.169996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.170238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:14.172850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.173320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.174385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.174450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.174597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.174650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.174760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.174971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.309693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.310976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:14.311140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.311591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
25.482013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.482210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:25.482248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:25.482390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:47:25.482514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:25.482562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-07T17:47:25.482609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:47:25.482877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.482916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:47:25.482979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.483005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-07T17:47:25.483035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-07T17:47:25.483927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:25.484001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:25.484026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:25.484057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-07T17:47:25.484092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 
2026-01-07T17:47:25.485434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:25.485518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:25.485548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:25.485577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:47:25.485605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:47:25.485678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-07T17:47:25.487511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:25.487562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:25.487864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:47:25.488041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:25.488079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:25.488114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:25.488146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:25.488181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-07T17:47:25.488243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:810:2776] message: TxId: 103 2026-01-07T17:47:25.488285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:25.488319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:25.488348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:25.488429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 2 2026-01-07T17:47:25.488809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:25.488859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:25.489600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:25.490743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:47:25.492073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:25.492124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-07T17:47:25.492402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:25.492449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1744:3668] 2026-01-07T17:47:25.493122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:25.496469Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.496683Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 242us result status StatusSuccess 2026-01-07T17:47:25.497168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D88518A4500 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:11.374532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:11.374612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.374648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:11.374683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:11.374724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:11.374750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:11.374799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:11.374866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 
0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:11.375622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:11.375897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:11.462002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:11.462053Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:11.474926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:11.475226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:11.475419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:11.481363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:11.481734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:11.482442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:11.482727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:11.485577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.485771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:11.486988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:11.487050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:11.487170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:11.487213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:11.487292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:11.487430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:11.615619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:47:11.616496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.616627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.616692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.616790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.616886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.616991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:11.617630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 2026-01-07T17:47:25.906119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 2026-01-07T17:47:25.927328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 1 2026-01-07T17:47:25.927474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-07T17:47:25.927523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:25.927663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want 
to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 2026-01-07T17:47:25.940172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 2026-01-07T17:47:25.951022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 2 2026-01-07T17:47:25.951158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-07T17:47:25.951204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-07T17:47:25.951352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D8851900D00, stats written 3 2026-01-07T17:47:25.951988Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:25.954170Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 249us result status StatusSuccess 2026-01-07T17:47:25.954662Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> TGRpcCmsTest::RemoveWithAnotherTokenTest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest >> TGRpcCmsTest::SimpleTenantsTest >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::DescribeOptionsTest >> TGRpcCmsTest::AlterRemoveTest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> KqpWorkload::KV [GOOD] >> TSchemeShardUserAttrsTest::MkDir |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse >> TSchemeShardUserAttrsTest::SetAttrs >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> TSchemeShardUserAttrsTest::SpecialAttributes >> TSchemeShardUserAttrsTest::Boot ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:57.306044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:57.306138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:57.306189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:57.306227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:57.306266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:57.306297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:57.306364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:57.306433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:57.307259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:57.307539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:57.398387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:57.398438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:57.413832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:57.414145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:57.414341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:57.420830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:57.421227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:57.421924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:46:57.422166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:57.424721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:57.424952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:57.426137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:57.426209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:57.426344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:57.426391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:57.426433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:57.426627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:57.576056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.579938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.580955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.581048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:57.581110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 046 ParentPathId: 80 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 85 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:27.404707Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:27.405053Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 357us result status StatusSuccess 2026-01-07T17:47:27.405552Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" 
Value: "{}" } } PathId: 91 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:27.406705Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:27.406937Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 258us result status StatusSuccess 2026-01-07T17:47:27.407366Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 86 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:27.408517Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:27.408807Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 314us result status StatusSuccess 2026-01-07T17:47:27.409287Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 94 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: 
[1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DB2B6D9F500 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:14.702114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:14.702197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.702233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:14.702265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:14.702309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:14.702338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:14.702394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:14.702472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:14.703247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:14.703479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:14.789124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:14.789180Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:14.803571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:14.803845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:14.803999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:14.809502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:14.809799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:14.810467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:14.810696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2026-01-07T17:47:14.812895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.813079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:14.814070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:14.814121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:14.814251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:14.814297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:14.814386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:14.814572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:14.959709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.960946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:14.961542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... eshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 2026-01-07T17:47:28.518718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 0 2026-01-07T17:47:28.518893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-07T17:47:28.518961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-07T17:47:28.519024Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-07T17:47:28.519078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DB2B6D04500, stats written 1 2026-01-07T17:47:28.519565Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:28.519834Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 300us result status StatusSuccess 2026-01-07T17:47:28.520320Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 151 LastUpdateTime: 151 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 162304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 4781, MsgBus: 28773 2026-01-07T17:46:08.011682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677412039503622:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:08.012003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:46:08.046875Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:08.267312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:08.267450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:08.288731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:08.387092Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:08.387626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:08.483275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:08.483322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:08.483336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:08.483457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:08.595481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:08.884456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:09.014979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:11.061193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677424924406245:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.061309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.061697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677424924406255:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.061759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.355124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:11.778003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677424924407813:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.778092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677424924407818:2454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.778095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.778237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677424924407820:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.778297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:11.781123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:11.791530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677424924407821:2456], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:46:11.924505Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677424924407873:3556] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:13.011126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677412039503622:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:46:13.011192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:46:23.197023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:46:23.197054Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.100186s took: 0.100185s took: 0.101579s took: 0.101664s took: 0.102617s took: 0.102661s took: 0.102515s took: 0.103564s took: 0.107942s took: 0.108181s took: 0.143120s took: 0.143552s took: 0.143443s took: 0.143844s took: 0.144751s took: 0.145060s took: 0.145080s took: 0.145149s took: 0.145911s took: 0.147061s took: 0.182076s took: 0.183104s took: 0.182962s took: 0.182550s took: 0.183154s took: 0.183631s took: 0.186987s took: 0.186134s took: 0.186399s took: 0.188054s took: 0.031335s took: 0.034798s took: 0.035313s took: 0.035338s took: 0.037433s took: 0.040991s took: 0.043632s took: 0.045009s took: 0.044446s took: 0.047502s took: 0.149779s took: 0.149035s took: 0.161880s took: 0.163617s took: 0.166760s took: 0.172767s took: 0.173888s took: 0.174351s took: 0.173394s took: 0.186847s 2026-01-07T17:47:28.073746Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-07T17:47:28.073779Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:47:28.073795Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-07T17:47:28.073810Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2026-01-07T17:47:28.073825Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-07T17:47:28.073839Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-07T17:47:28.073856Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-07T17:47:28.073874Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2026-01-07T17:47:28.087403Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2026-01-07T17:47:28.087441Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-07T17:47:28.111238Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-07T17:47:28.111278Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2026-01-07T17:47:28.111296Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-07T17:47:28.111314Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2026-01-07T17:47:28.111332Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-07T17:47:28.111354Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2026-01-07T17:47:28.111384Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2026-01-07T17:47:28.111414Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-07T17:47:28.111434Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2026-01-07T17:47:28.111451Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-07T17:47:28.111468Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-07T17:47:28.111485Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-07T17:47:28.111503Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-07T17:47:28.111520Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-07T17:47:28.111540Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2026-01-07T17:47:28.111564Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:47:28.117839Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2026-01-07T17:47:28.117880Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-07T17:47:28.120159Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:47:28.120194Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 
2026-01-07T17:47:28.120211Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-07T17:47:28.120227Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:47:28.120243Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-07T17:47:28.120258Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2026-01-07T17:47:28.120274Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-01-07T17:47:28.120290Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2026-01-07T17:47:28.120341Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-07T17:47:28.120359Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-07T17:47:28.120375Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2026-01-07T17:47:28.120390Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] |93.0%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcCmsTest::DisabledTxTest [GOOD] |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.843880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.843936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.858231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.858530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.858684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.865000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.865311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.865980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:47:29.866209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.868544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.868719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.037867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:30.677435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:30.677503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:47:30.679442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.679617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2026-01-07T17:47:30.679803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.679855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.679908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.680018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2026-01-07T17:47:30.681428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:47:30.681545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-07T17:47:30.681784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:30.681870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.681920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-07T17:47:30.682054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:30.682090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.682136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:30.682168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.682216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:30.682270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:47:30.682335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:30.682364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.682411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:30.682445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:30.682497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:47:30.682530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-07T17:47:30.682564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:47:30.684041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.684089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:30.684323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.684370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:47:30.684816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:30.684964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:30.685005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:30.685044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:47:30.685081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:30.685149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:47:30.686421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:47:30.686673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:30.686710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:30.687007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:30.687084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:30.687130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:748:2737] TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:30.687570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.687728Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2026-01-07T17:47:30.688154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcCmsTest::DescribeOptionsTest [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753494Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.848419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.848476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.863219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.863528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.863690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.869729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.870055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.870737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.870981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.873550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.873731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875198Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.024681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... StateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.691571Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.691733Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/DirA" took 157us result status StatusSuccess 2026-01-07T17:47:30.692145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.692696Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.692835Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 157us result status StatusSuccess 2026-01-07T17:47:30.693188Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.693706Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.693887Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 170us result status StatusSuccess 2026-01-07T17:47:30.694222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-07T17:47:30.694756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.694903Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 157us result status StatusSuccess 2026-01-07T17:47:30.695214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.847152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.847211Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.862039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.862351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.862530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.869013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.869325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.870060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.870311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.872788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.873005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.053815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.054751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.054867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.054951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.055867Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -07T17:47:30.628529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.628595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.628622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:30.628650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-07T17:47:30.628690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:30.628759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:47:30.630382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-07T17:47:30.630530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-07T17:47:30.631032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:30.631151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.631219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038, at schemeshard: 72057594046678944 2026-01-07T17:47:30.631344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:47:30.631511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:30.631572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:30.632066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:30.633287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:30.634450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.634489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:30.634632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:30.634726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.634760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:47:30.634798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:47:30.635038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.635075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:30.635173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:30.635220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:30.635260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:30.635293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:30.635347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:47:30.635394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:30.635428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:30.635459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:30.635521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 
2026-01-07T17:47:30.635574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:47:30.635612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-07T17:47:30.635641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-07T17:47:30.636229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.636307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.636424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:30.636470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:47:30.636536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:30.636997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.637061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:30.637085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:30.637119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:30.637162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:30.637236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:47:30.640055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:30.640135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 
TestModificationResults wait txId: 103 2026-01-07T17:47:30.643022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:30.643276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.643388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2026-01-07T17:47:30.645409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.645683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 >> THealthCheckTest::TestStateStorageYellow [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753797Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.827181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.827223Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.839533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.839773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.840828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.848742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.850123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.852002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.855569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.863781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.865150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.037368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.038953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.039246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
alue: "ValA2" } } ApplyIf { PathId: 38 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:30.651997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.652127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-07T17:47:30.652173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-07T17:47:30.652286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:30.652339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-07T17:47:30.654237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.654448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2026-01-07T17:47:30.654630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.654694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.654746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.654853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:30.656580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-07T17:47:30.656723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-07T17:47:30.657073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-01-07T17:47:30.657184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.657233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-07T17:47:30.657405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:30.657466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.657511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:47:30.657542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.657602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:30.657672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:47:30.657727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:47:30.657785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:47:30.657821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:47:30.657855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:47:30.657923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:47:30.657962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-07T17:47:30.657997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:47:30.659971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.660020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:30.660146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.660202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 
72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: Erasing txId 103 2026-01-07T17:47:30.660681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:30.660773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:47:30.660820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:47:30.660864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:47:30.660943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:47:30.661030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:47:30.662743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:47:30.662977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:47:30.663033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:47:30.663454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:47:30.663545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:47:30.663596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:756:2745] TestWaitNotification: OK eventTxId 103 2026-01-07T17:47:30.664128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.664290Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 196us result status StatusSuccess 2026-01-07T17:47:30.664659Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.826468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.826513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.840532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.840879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.841045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.848740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.850102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.852053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.855508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.863620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.865148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.031399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.033946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.034615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
meshard: 72057594046678944, cookie: 281474976710672 2026-01-07T17:47:30.563609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-07T17:47:30.563684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-07T17:47:30.563723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-07T17:47:30.569535Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-07T17:47:30.577143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:30.577433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.577599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:47:30.577638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:47:30.577770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:47:30.577831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:30.579563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:30.579687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:47:30.579835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.579907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:47:30.579951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:47:30.579979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-07T17:47:30.581284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.581329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:30.581365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:47:30.582669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.582717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.582765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:30.582813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:47:30.583006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:30.584353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:47:30.584483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-07T17:47:30.584836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:30.584976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.585014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:30.585212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:47:30.585273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:47:30.585395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2026-01-07T17:47:30.585447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:47:30.587203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.587237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:30.587400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.587444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-07T17:47:30.587690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.587729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-07T17:47:30.587817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:30.587856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:30.587888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-07T17:47:30.587920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:30.587952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-07T17:47:30.587983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-07T17:47:30.588049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:47:30.588083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-07T17:47:30.588143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:30.588178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-07T17:47:30.588213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-07T17:47:30.588827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:30.588964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:47:30.589018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:47:30.589069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:47:30.589132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:47:30.589219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:47:30.591767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:47:30.592507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.843384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.843434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.857003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.857297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.857463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.863465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.863775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.864430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.864685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.867094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.867284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.876983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.039296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.040997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.041066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.041137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.041224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2026-01-07T17:47:30.868033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.868082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:30.868277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:47:30.868338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:47:30.868491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.868530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-07T17:47:30.868568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 39 2026-01-07T17:47:30.868627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 40 2026-01-07T17:47:30.869413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:47:30.869514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:47:30.869551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:47:30.869594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-07T17:47:30.869638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:47:30.870130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:47:30.870275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 
72057594046678944, cookie: 112 2026-01-07T17:47:30.870313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:47:30.870348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-07T17:47:30.870385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:47:30.871155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:47:30.871235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:47:30.871275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:47:30.871303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-07T17:47:30.871332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:47:30.871422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-07T17:47:30.871647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:30.871694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:47:30.871761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:47:30.874331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:47:30.874682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:47:30.876385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:47:30.876510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, 
wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-07T17:47:30.876957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-07T17:47:30.877012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-07T17:47:30.877667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-07T17:47:30.877769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:47:30.877811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:904:2893] TestWaitNotification: OK eventTxId 112 2026-01-07T17:47:30.878614Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.878850Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 225us result status StatusSuccess 2026-01-07T17:47:30.879366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2026-01-07T17:47:30.884689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 38 PathVersion: 8 } ApplyIf { PathId: 39 PathVersion: 7 } ApplyIf { PathId: 40 
PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:47:30.884951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.885081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:47:30.887780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:30.888004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:29.753507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:29.753612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:29.753706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:29.753757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:29.753788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:29.753858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:29.753926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:29.754731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:29.756525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:29.825917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:29.825966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:29.839179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:29.839471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:29.840813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:29.850830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:29.851273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:29.852187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:29.855566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:29.863936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.865200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:29.875218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:29.875451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:29.875501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:29.875559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:29.877071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:30.051061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.052997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.053080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.053150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:30.053251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Operation and all the parts is done, operation id: 105:0 2026-01-07T17:47:30.774444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:47:30.774497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:47:30.774533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-07T17:47:30.774567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-07T17:47:30.774594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-07T17:47:30.775369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:30.776544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:30.777624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:30.777658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:30.777811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:47:30.777951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:30.777983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-07T17:47:30.778032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2026-01-07T17:47:30.778552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:30.778623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:30.778656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:30.778689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-07T17:47:30.778739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-07T17:47:30.779095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:30.779164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:47:30.779186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:47:30.779207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-07T17:47:30.779228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:47:30.779306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:47:30.779532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:47:30.779581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:47:30.779639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:47:30.781362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:30.782313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:47:30.782385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-07T17:47:30.782661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-07T17:47:30.782719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-07T17:47:30.783101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 
2026-01-07T17:47:30.783198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:47:30.783239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:808:2797] TestWaitNotification: OK eventTxId 105 2026-01-07T17:47:30.783836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.784011Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 186us result status StatusPathDoesNotExist 2026-01-07T17:47:30.784170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:47:30.784678Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:30.784831Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 163us result status StatusSuccess 2026-01-07T17:47:30.785327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:18.737700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:18.737771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.737798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:18.737825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:18.737858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:18.737880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:18.737916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:18.737978Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:47:18.738618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:18.738821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:18.807177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:18.807220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:18.820328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:18.820664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:18.820868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:18.827836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:18.828238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:18.829065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:18.829319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:18.832094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.832301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:18.833452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:18.833515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:18.833646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:18.833696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:18.833817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:18.834022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:18.957848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.958979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:18.959518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
nId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:31.027538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:31.027815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:31.027863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:31.028034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:47:31.028167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:31.028207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 107, path id: 38 2026-01-07T17:47:31.028274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 107, path id: 40 2026-01-07T17:47:31.028557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:31.028624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:47:31.028713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:31.028770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:47:31.028809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-07T17:47:31.030024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:47:31.030127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:47:31.030170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:47:31.030221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 14 2026-01-07T17:47:31.030266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-07T17:47:31.030854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:47:31.030947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:47:31.030984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:47:31.031009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-07T17:47:31.031046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-07T17:47:31.031111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-01-07T17:47:31.033594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-07T17:47:31.033647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:47:31.033916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:47:31.034038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:47:31.034070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:47:31.034108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:47:31.034138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:47:31.034198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-07T17:47:31.034238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:47:31.034275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:47:31.034305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-07T17:47:31.034406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:47:31.034779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:31.034811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:47:31.035683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:31.037228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:47:31.038067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:31.038112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-07T17:47:31.038690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2026-01-07T17:47:31.039307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-07T17:47:31.039346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-07T17:47:31.039778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-07T17:47:31.039859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:47:31.039905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1398:3322] TestWaitNotification: OK eventTxId 107 2026-01-07T17:47:31.040521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:47:31.040737Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 230us result status StatusSuccess 2026-01-07T17:47:31.041192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:47:19.802826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:47:19.802910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:19.802949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:47:19.802980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:47:19.803021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:47:19.803049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:47:19.803099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:47:19.803177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:47:19.803915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:47:19.804125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:47:19.870972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:19.871017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:19.883375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:47:19.883626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:47:19.883764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:47:19.889302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:47:19.889555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:47:19.890138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:19.890326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:47:19.892604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:19.892798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:47:19.893749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:47:19.893807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:47:19.893901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:47:19.893946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:47:19.894019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:47:19.894158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:47:19.998764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.999722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.999837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.999915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:19.999982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:47:20.000675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
1-07T17:47:30.918056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-07T17:47:30.918095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:30.918274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-07T17:47:30.918441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:30.918476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-07T17:47:30.918528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-07T17:47:30.918976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:30.919027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-07T17:47:30.919106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:30.919138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-07T17:47:30.919175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-07T17:47:30.920017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:30.920129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:30.920187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:30.920229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 14 2026-01-07T17:47:30.920269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-07T17:47:30.921457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:30.921519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-07T17:47:30.921540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-07T17:47:30.921564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-07T17:47:30.921589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-07T17:47:30.921651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:47:30.924649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-07T17:47:30.924718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-07T17:47:30.925123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-07T17:47:30.925314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:30.925360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:30.925400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:47:30.925437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:30.925477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:47:30.925550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:995:2935] message: TxId: 104 2026-01-07T17:47:30.925603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:47:30.925645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:47:30.925680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:47:30.925785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-07T17:47:30.926326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2026-01-07T17:47:30.926369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-07T17:47:30.927478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:30.928753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-07T17:47:30.929930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-07T17:47:30.929986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:856:2809], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-07T17:47:30.930505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:47:30.930567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2032:3941] 2026-01-07T17:47:30.930983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-07T17:47:30.935027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-07T17:47:30.935307Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 314us result status StatusSuccess 2026-01-07T17:47:30.935885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2026-01-07T17:47:27.633830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677748903468244:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.634026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.799406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.925831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.926018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.951527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:27.966514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:28.103350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.260155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:28.260189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260297Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:28.644809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.649384Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.767693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2026-01-07T17:47:28.786779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcCmsTest::AuthTokenTest [GOOD] |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2026-01-07T17:47:27.623196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677750995590653:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.623274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.799542Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.927883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.927995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.964728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:27.970916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:28.051736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.260140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:28.260184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260291Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:28.633557Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.645049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.860640Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2026-01-07T17:47:28.860661Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2026-01-07T17:47:28.865810Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2026-01-07T17:47:28.904377Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7592677755290558955:2303], Recipient [1:7592677750995591054:2216]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:37068" } 2026-01-07T17:47:28.904421Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event 
TEvConsole::TEvDescribeTenantOptionsRequest 2026-01-07T17:47:28.910200Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> TFlatTest::CrossRW >> TLocksTest::Range_BrokenLock0 >> TFlatTest::CopyCopiedTableAndRead >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::Ls >> TFlatTest::SelectRangeBytesLimit >> TCancelTx::CrossShardReadOnly >> TLocksFatTest::RangeSetRemove >> TLocksFatTest::PointSetBreak >> TFlatTest::ReadOnlyMode >> TFlatTest::ShardFreezeUnfreezeAlreadySet ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2026-01-07T17:47:27.618679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677748339757094:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.618743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.891336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.930501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.930583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.976206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:27.997250Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:28.183061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.260122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:28.260168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260241Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:28.645616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.674205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.761028Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7592677752634725482:2302], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:59750" } 2026-01-07T17:47:28.761065Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-07T17:47:28.761096Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.761105Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.761205Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:59750" 2026-01-07T17:47:28.761331Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767808048761209) 2026-01-07T17:47:28.790109Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767808048761209 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-07T17:47:28.790336Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-07T17:47:28.793564Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-07T17:47:28.794653Z 
node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048761209&action=1" } } } 2026-01-07T17:47:28.794787Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.794870Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:28.795031Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:28.795405Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-07T17:47:28.795608Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:47:28.797903Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677752634725490:2303], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048761209&action=1" } UserToken: "" } 2026-01-07T17:47:28.797924Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.798055Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048761209&action=1" } } 2026-01-07T17:47:28.799599Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-07T17:47:28.799639Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.799715Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7592677752634725487:2205], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.799752Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.799772Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.799788Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.799826Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-07T17:47:28.799854Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-07T17:47:28.799893Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-07T17:47:28.802238Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:28.802260Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.802265Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.802270Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.802306Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-07T17:47:28.802321Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767808048761209 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:28.804074Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:28.814025Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.814087Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-07T17:47:28.814094Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-07T17:47:28.819760Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:59750" 2026-01-07T17:47:28.821055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:47:28.823172Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-07T17:47:28.823272Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-01-07T17:47:28.826586Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2026-01-07T17:47:28.833652Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2026-01-07T17:47:28.836329Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 
PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767808048874 ParentPathId: 38 PathState: EPathStateNoChanges Owner: ... chemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-07T17:47:29.235204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-07T17:47:29.235324Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2026-01-07T17:47:29.235335Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-07T17:47:29.235373Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.235501Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7592677756929693471:2205], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.235517Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.235532Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.235539Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.235571Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-07T17:47:29.235597Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767808049191789 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:29.235647Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767808049191789 issue= 2026-01-07T17:47:29.238904Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T17:47:29.239625Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-07T17:47:29.239690Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.239703Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.240134Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677748339757448:2202], Recipient [1:7592677748339757587:2205]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.240150Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.240171Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.240187Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.240223Z node 
1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-07T17:47:29.240265Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767808049191789 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:29.243001Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:29.243108Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.243171Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:29.243319Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:29.244270Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-07T17:47:29.244385Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-07T17:47:29.247946Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-07T17:47:29.248077Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7592677756929693594:2205], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-07T17:47:29.249152Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-07T17:47:29.249183Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.249169Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [8ba2ac1d72f7d332] Result# TEvPutResult {Id# [72075186224037888:1:20:0:0:149:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2026-01-07T17:47:29.250116Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.194 sample PartId# [72075186224037888:1:20:0:0:149:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2026-01-07T17:47:29.250005Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.250079Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-07T17:47:29.250101Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-07T17:47:29.260732Z node 1 :CMS_TENANTS DEBUG: 
console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:29.260789Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.260801Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.260813Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.260917Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767808049191789 2026-01-07T17:47:29.260987Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767808049191789 issue= 2026-01-07T17:47:29.261027Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767808049191789 issue= 2026-01-07T17:47:29.261039Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-07T17:47:29.261116Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767808049191789 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:29.261461Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756929693606:2392], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049191789&action=2" } UserToken: "" } 2026-01-07T17:47:29.261487Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.261661Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049191789&action=2" } } 2026-01-07T17:47:29.264319Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-07T17:47:29.264437Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.317900Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756929693623:2394], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049191789&action=2" } UserToken: "" } 2026-01-07T17:47:29.317933Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.318176Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049191789&action=2" ready: true status: SUCCESS } } 2026-01-07T17:47:29.320589Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677756929693626:2396], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:59750" } 2026-01-07T17:47:29.320621Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.320740Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-07T17:47:29.322883Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7592677756929693629:2397], Recipient [1:7592677748339757587:2205]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:59750" } 2026-01-07T17:47:29.322905Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-07T17:47:29.323114Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-07T17:47:29.329002Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:47:29.329195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:47:29.861645Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TLocksTest::BrokenLockErase >> TFlatTest::MiniKQLRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2026-01-07T17:47:27.610187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677751708634830:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.610372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.801183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.925275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.925382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.940106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:27.967805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.971721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:28.260155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-07T17:47:28.260192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260266Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:28.409040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.625820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.651447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.754746Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7592677756003603204:2302], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:28.754799Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-07T17:47:28.754825Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.754838Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.754975Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" 2026-01-07T17:47:28.755487Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767808048755223) 2026-01-07T17:47:28.785046Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767808048755223 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-07T17:47:28.785244Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-07T17:47:28.788397Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-07T17:47:28.789411Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: 
NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048755223&action=1" } } } 2026-01-07T17:47:28.789556Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.789625Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:28.789753Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:28.790098Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-07T17:47:28.790261Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:47:28.792873Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756003603213:2303], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048755223&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-07T17:47:28.792907Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.793157Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048755223&action=1" } } 2026-01-07T17:47:28.794076Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-07T17:47:28.794119Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.794177Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7592677756003603209:2210], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.794193Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.794205Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.794213Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.794251Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-07T17:47:28.794286Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 
2026-01-07T17:47:28.794348Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-07T17:47:28.796753Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:28.796780Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.796797Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.796805Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.796869Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-07T17:47:28.796887Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767808048755223 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:28.798990Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:28.810289Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.810398Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-07T17:47:28.810408Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-07T17:47:28.817844Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:53314" 2026-01-07T17:47:28.821298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:47:28.823504Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-07T17:47:28.823565Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-01-07T17:47:28.826780Z node 1 :CMS_TENANTS DEBUG: console ... 
960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.147569Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.148200Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.152294Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677760298571159:2373], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:29.152326Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.152360Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.152434Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.152465Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.153029Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.155118Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677760298571167:2374], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:29.155144Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.155177Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.155268Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.155278Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.155702Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.158336Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2026-01-07T17:47:29.158360Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-07T17:47:29.158426Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-07T17:47:29.158566Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7592677756003603312:2210], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-07T17:47:29.158601Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-01-07T17:47:29.158617Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.158626Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.158677Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2026-01-07T17:47:29.158719Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1767808048755223 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:29.158789Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2904: Update database for /Root/users/user-1 confirmedsubdomain=2 2026-01-07T17:47:29.159600Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677760298571171:2375], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:29.159621Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.159650Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.159796Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.159822Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.160467Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send 
TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.162559Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677760298571177:2376], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:29.162584Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.162616Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.162715Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.162739Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.163223Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.165514Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2026-01-07T17:47:29.165552Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.165748Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7592677760298571182:2377], Recipient [1:7592677751708635321:2210]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:53314" } 2026-01-07T17:47:29.165776Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-07T17:47:29.165808Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.165961Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751708635195:2209], Recipient [1:7592677751708635321:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.166006Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.166543Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-07T17:47:29.394835Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:47:29.395351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:47:29.859799Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:29.870975Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:30.860392Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageYellow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:06.140666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:06.275353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:06.283776Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:06.284891Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:06.285270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:06.285458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:06.287302Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:46:06.287651Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:06.287845Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:06.287977Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:06.797377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:06.941570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:06.941720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:06.942515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:06.942602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:06.996853Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:06.997554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:06.997861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:07.091966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:07.175144Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:08.038395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:08.038469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:08.038502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:08.038882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-07T17:46:15.303251Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:15.303822Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:761:2348], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-07T17:46:15.304837Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:15.314495Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:15.316157Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:15.317689Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:766:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:15.318159Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:15.318223Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:15.319734Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:15.319883Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:15.663097Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:15.780426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:15.780563Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:15.781355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:15.781443Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:15.829112Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:15.830045Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:15.830523Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:15.926434Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:15.969101Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:19.785949Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:19.789522Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:46:19.790859Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:46:19.794698Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:19.794755Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:19.794792Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:19.795632Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:19.797351Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-07T17:46:19.797566Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:46:19.817390Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:19.817523Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:19.858316Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:46:19.859042Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:19.967410Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2026-01-07T17:46:19.967981Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-3" reason: "YELLOW-7932-1231c6b1-4" reason: "YELLOW-7932-1231c6b1-5" type: "COMPUTE" level: 2 } issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" leve ... 
1-07T17:47:25.326541Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:25.326583Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:25.326711Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:25.326750Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:25.326866Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:25.326902Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:25.327018Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:25.327056Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:25.327168Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:25.327210Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:25.387223Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-07T17:47:25.387576Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-07T17:47:25.387706Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2026-01-07T17:47:25.387781Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2026-01-07T17:47:25.387847Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 19 Cookie 19 2026-01-07T17:47:25.387944Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 20 Cookie 20 2026-01-07T17:47:25.388063Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 21 Cookie 21 2026-01-07T17:47:25.388187Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 22 Cookie 22 2026-01-07T17:47:25.388524Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.389212Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.389419Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.389694Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.389919Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.390147Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:47:25.390310Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.390458Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.390611Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:25.457935Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.472467Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.515737Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.530061Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.601726Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.628819Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.671759Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.686396Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:25.724553Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:26.660170Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:26.660239Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:26.660282Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:26.660749Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:26.735703Z node 15 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.735867Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.735930Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736013Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736078Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736144Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736215Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736283Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:26.736748Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-17" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 17 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-18" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 18 host: "::1" port: 12005 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-19" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 19 host: "::1" port: 12006 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-20" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 20 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-21" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 21 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-22" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 22 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: 
"YELLOW-7932-1231c6b1-17" reason: "YELLOW-7932-1231c6b1-18" reason: "YELLOW-7932-1231c6b1-19" reason: "YELLOW-7932-1231c6b1-20" reason: "YELLOW-7932-1231c6b1-21" reason: "YELLOW-7932-1231c6b1-22" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1da1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-14" type: "STATE_STORAGE_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-b30b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-59f1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-14" type: "SCHEME_BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-106b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-7532" status: YELLOW message: "Multiple rings have unavailable replicas" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-14" type: "BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-0632-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "BOARD_NODE" level: 3 listed: 3 count: 3 } location { id: 14 host: "::1" port: 12001 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest Test command err: 2026-01-07T17:47:27.613938Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007367s 2026-01-07T17:47:27.630780Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677747569923467:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.631097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.874714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.925727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.925838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.976955Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:27.984725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:47:28.057803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.260124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:28.260209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:28.637078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.638061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.703862Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7592677751864891762:2302], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:51100" } 2026-01-07T17:47:28.703915Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-07T17:47:28.703972Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.703986Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.704106Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:51100" 2026-01-07T17:47:28.704247Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767808048702869) 2026-01-07T17:47:28.753636Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767808048702869 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-07T17:47:28.753964Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-07T17:47:28.763907Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-07T17:47:28.765026Z node 1 :CMS_TENANTS TRACE: 
console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048702869&action=1" } } } 2026-01-07T17:47:28.765181Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.765248Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:28.765414Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:28.765791Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-07T17:47:28.765929Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:47:28.769047Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677751864891771:2303], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048702869&action=1" } UserToken: "" } 2026-01-07T17:47:28.769083Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.769318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048702869&action=1" } } 2026-01-07T17:47:28.769629Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-07T17:47:28.769672Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.769730Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7592677751864891767:2225], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.769762Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.769806Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.769815Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.769856Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-07T17:47:28.769883Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-07T17:47:28.769948Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update 
subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-07T17:47:28.772564Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:28.772587Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.772594Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.772601Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.772643Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-07T17:47:28.772667Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767808048702869 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:28.774760Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:28.782767Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.782841Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-07T17:47:28.782857Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-07T17:47:28.788533Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:51100" 2026-01-07T17:47:28.790049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:47:28.792118Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-07T17:47:28.792217Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-07T17:47:28.795379Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-07T17:47:28.803064Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2026-01-07T17:47:28.813206Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain ... 
dnumgroups=2 2026-01-07T17:47:28.838406Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1767808048839 ParentPathId: 38 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 39 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:47:28.838426Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:629: TSubDomainManip(/Root/users/user-1) drop subdomain 2026-01-07T17:47:28.838572Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:642: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:51100" 2026-01-07T17:47:28.840162Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677751864891937:2310], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:28.840218Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.840371Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:28.841044Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:28.841068Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.841790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5826: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 39] by tx: 281474976715660 
2026-01-07T17:47:28.842087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp:342) 2026-01-07T17:47:28.893299Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677751864891991:2313], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:28.893331Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.893477Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:28.945796Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677751864891994:2315], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:28.945833Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.945987Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:28.998698Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677751864892000:2317], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:28.998724Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.998889Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:29.054914Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756159859299:2319], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:29.054945Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.055093Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:29.108032Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756159859305:2321], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:29.108063Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.108252Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:29.162035Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756159859309:2323], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:29.162064Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.162203Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } 2026-01-07T17:47:29.221644Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677756159859312:2325], Recipient [1:7592677747569923886:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } UserToken: "" } 2026-01-07T17:47:29.221674Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.221845Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048833162&action=2" } } VERIFY failed (2026-01-07T17:47:28.842319Z): part is blocked and done: 1 ydb/core/tx/schemeshard/schemeshard__operation.h:134 IsDoneBarrier(): requirement !DoneParts.contains(blocked) failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+888 (0x1B1808A8) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x1B16E5DB) NKikimr::NSchemeShard::TOperation::IsDoneBarrier() const+1758 (0x3B4C3DCE) NKikimr::NSchemeShard::TSideEffects::DoCheckBarriers(NKikimr::NSchemeShard::TSchemeShard*, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&)+2438 (0x3B4A3B56) NKikimr::NSchemeShard::TSideEffects::ApplyOnExecute(NKikimr::NSchemeShard::TSchemeShard*, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&)+1031 (0x3B4A0B67) NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, 
NActors::TActorContext const&)+5937 (0x3B5A0091) NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*)+3489 (0x222E2451) NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode)+8685 (0x222DC75D) NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&)+47 (0x222E507F) NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&)+459 (0x22277CDB) NKikimr::NSchemeShard::TSchemeShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+954 (0x3AD6DC4A) NKikimr::NSchemeShard::TSchemeShard::StateWork(TAutoPtr&)+14272 (0x3ACACEC0) NActors::IActor::Receive(TAutoPtr&)+744 (0x1D0F62F8) NActors::TExecutorThread::Execute(NActors::TMailbox*, bool)+8322 (0x1D1E2F82) ??+0 (0x1D1ECBA7) NActors::TExecutorThread::ProcessExecutorPool()+1486 (0x1D1EC15E) NActors::TExecutorThread::ThreadProc()+399 (0x1D1EE1CF) ??+0 (0x1B185905) ??+0 (0x1AE27547) ??+0 (0x7F77A58EEAC3) ??+0 (0x7F77A59808C0) |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2026-01-07T17:47:27.610230Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677751039983433:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:27.612561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:27.858780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:47:27.926319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:27.926455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:27.937260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:27.977794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:28.144982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:47:28.260141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:28.260170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:28.260176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:28.260248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-07T17:47:28.619216Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:28.631966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:28.833761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:28.876820Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7592677755334951871:2302], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:32992" } 2026-01-07T17:47:28.876883Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-07T17:47:28.876914Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.876942Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.877067Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:32992" 2026-01-07T17:47:28.877289Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767808048876830) 2026-01-07T17:47:28.914527Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767808048876830 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-07T17:47:28.914790Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-07T17:47:28.921808Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-07T17:47:28.923004Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048876830&action=1" } } } 2026-01-07T17:47:28.923173Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.923258Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:28.923423Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:28.923813Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-07T17:47:28.923987Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-07T17:47:28.927331Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677755334951882:2303], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048876830&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 2026-01-07T17:47:28.927355Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:28.927552Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808048876830&action=1" } } 2026-01-07T17:47:28.929220Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-07T17:47:28.929285Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.929369Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7592677755334951876:2205], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.929386Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-07T17:47:28.929398Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.929406Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.929443Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-07T17:47:28.929470Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-07T17:47:28.929538Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update 
subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-07T17:47:28.932291Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:28.932321Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:28.932329Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.932343Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:28.932392Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-07T17:47:28.932410Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767808048876830 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-07T17:47:28.934597Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:28.945397Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:28.945469Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-07T17:47:28.945479Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-07T17:47:28.952601Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:32992" 2026-01-07T17:47:28.954547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:47:28.956742Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-07T17:47:28.956839Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notificat ... 
72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2026-01-07T17:47:29.562888Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found - using supplied 72075186224037892 2026-01-07T17:47:29.562896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-01-07T17:47:29.562920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-07T17:47:29.562967Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037895 2026-01-07T17:47:29.563267Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037889 2026-01-07T17:47:29.563312Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-01-07T17:47:29.563874Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2026-01-07T17:47:29.563897Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-07T17:47:29.563988Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.564152Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7592677759629919893:2205], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.564186Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-07T17:47:29.564214Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.564225Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.564282Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-07T17:47:29.564323Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767808049534084 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.564385Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767808049534084 issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.565629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-07T17:47:29.565710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 
72057594046644480:6 2026-01-07T17:47:29.565751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-07T17:47:29.565790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-07T17:47:29.565846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-07T17:47:29.567139Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T17:47:29.569560Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-07T17:47:29.569618Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.569628Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.569998Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7592677751039983802:2212], Recipient [1:7592677751039983941:2205]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-07T17:47:29.570040Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-07T17:47:29.570089Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.570100Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.570134Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-07T17:47:29.570156Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767808049534084 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.573106Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-07T17:47:29.573174Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.573211Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-07T17:47:29.573345Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-07T17:47:29.574063Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2026-01-07T17:47:29.574159Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: 
TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2026-01-07T17:47:29.578337Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2026-01-07T17:47:29.578558Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7592677759629919984:2205], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-07T17:47:29.578627Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-07T17:47:29.578654Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.578672Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.578735Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-07T17:47:29.578757Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-07T17:47:29.578945Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [de35a15f3cf7c353] Result# TEvPutResult {Id# [72075186224037888:1:17:0:0:150:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2026-01-07T17:47:29.579157Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.244 sample PartId# [72075186224037888:1:17:0:0:150:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2026-01-07T17:47:29.584541Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-07T17:47:29.584571Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-07T17:47:29.584579Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.584585Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-07T17:47:29.584643Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767808049534084 2026-01-07T17:47:29.584659Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767808049534084 issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.584696Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767808049534084 issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.584704Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-07T17:47:29.584774Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767808049534084 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-07T17:47:29.589548Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-07T17:47:29.589596Z node 1 
:CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-07T17:47:29.593646Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7592677759629920001:2374], Recipient [1:7592677751039983941:2205]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049534084&action=2" } UserToken: "" } 2026-01-07T17:47:29.593679Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-07T17:47:29.593821Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767808049534084&action=2" ready: true status: SUCCESS } } 2026-01-07T17:47:29.598774Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:47:29.599020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:47:29.989350Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] >> TFlatTest::ShardFreezeRejectBadProtobuf >> TFlatTest::SelectRangeNullArgs3 >> TLocksTest::GoodLock >> TLocksTest::GoodSameKeyLock |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::WriteSplitByPartialKeyAndRead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> TFlatTest::CopyTableAndCompareColumnsSchema >> TFlatTest::WriteSplitKillRead >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable [GOOD] >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools >> TFlatTest::CopyTableAndRead |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 |93.0%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksTest::UpdateLockedKey |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TObjectStorageListingTest::CornerCases >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> TFlatTest::AutoSplitBySize >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> TLocksTest::Range_GoodLock0 >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> TFlatTest::Mix_DML_DDL >> TLocksTest::GoodDupLock >> TFlatTest::Init >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TLocksTest::BrokenLockUpdate >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:30.264317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:30.264419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:30.264460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:30.264496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:30.264537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:30.264569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:30.264645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:30.264742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:30.265634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:30.265914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:30.352314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:30.352389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:30.367308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:30.367637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:30.367823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:30.374187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:30.374553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:30.375300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:30.375555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:30.378177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:30.378433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:30.379575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:30.379634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:30.379776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:30.379825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:30.379873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:30.380072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:30.519212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520211Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.520947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.521028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.521091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.521156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:30.521231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:47:36.201446Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:47:36.202119Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.202217Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:47:36.203974Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:36.204131Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:36.204193Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:36.204259Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:47:36.204335Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:47:36.205237Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:36.205323Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:47:36.205353Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:47:36.205384Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-07T17:47:36.205417Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:47:36.205495Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-07T17:47:36.205754Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 
OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1772 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:47:36.205789Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:47:36.205912Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1772 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-07T17:47:36.206086Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1772 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-07T17:47:36.206789Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 749 RawX2: 73014446767 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:36.206858Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-07T17:47:36.207058Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 749 RawX2: 73014446767 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:36.207163Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:47:36.207330Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 749 RawX2: 73014446767 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:47:36.207452Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:47:36.207508Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.207558Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 
2026-01-07T17:47:36.207609Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:47:36.210516Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:36.210992Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:47:36.211067Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.211152Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.211405Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.211457Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:47:36.211621Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:36.211672Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:36.211724Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:47:36.211769Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:36.211825Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-07T17:47:36.211909Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:717:2707] message: TxId: 102 2026-01-07T17:47:36.211967Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:47:36.212023Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:47:36.212065Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:47:36.212198Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:47:36.213821Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:47:36.213895Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:777:2755] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-07T17:47:36.218176Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "value" FamilyName: "new_family" } } } TxId: 103 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2026-01-07T17:47:36.218532Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:47:36.218907Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Column families require StorageConfig specification, at schemeshard: 72057594046678944 2026-01-07T17:47:36.222298Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Column families require StorageConfig specification" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:47:36.222668Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column families require StorageConfig specification, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TFlatTest::LsPathId [GOOD] >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TFlatTest::MergeEmptyAndWrite [GOOD] >> TFlatTest::GetTabletCounters [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2026-01-07T17:47:37.159004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677791252483341:2115];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:47:37.159632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.458413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.516853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.516961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.585341Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677791252483234:2081] 1767808057096487 != 1767808057096490 2026-01-07T17:47:37.594689Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.596383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.727061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.878916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.925587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.056650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2026-01-07T17:47:38.086908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:38.113840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:38.135316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:38.163655Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2026-01-07T17:47:33.621953Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677775959524434:2218];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.622020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.964312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:33.987842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:33.987973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.057450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.104799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.213834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.438349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.445796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.533032Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677780254492369:2523] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T17:47:34.544535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-07T17:47:34.629305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:37.234869Z node 2 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677792718659784:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.234966Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.259563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:37.401886Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.405169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.405242Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.406181Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677792718659624:2081] 1767808057225483 != 1767808057225486 2026-01-07T17:47:37.406234Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.454475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.576131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.734450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.809779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2026-01-07T17:47:33.613137Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677775330363606:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.613431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.651766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:34.019982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-07T17:47:34.097940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.098024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.135728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.178985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.180914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677775330363489:2081] 1767808053595710 != 1767808053595713 2026-01-07T17:47:34.243614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.488024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.513820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.544608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.617908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.658309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.418251Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:37.421484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.556345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.556659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.588201Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677794421621779:2081] 1767808057330259 != 1767808057330262 2026-01-07T17:47:37.607475Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.615931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:47:37.724646Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.858202Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.870447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.889677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:37.903969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2026-01-07T17:47:33.604238Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677776315407762:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.606881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.651508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:34.021079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.021336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.021445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.096598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.149382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.313049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.419849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.429123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.449660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.540586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.613593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:37.268092Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677790667510015:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.268830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.295853Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.400438Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.417301Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677790667509942:2081] 1767808057254506 != 1767808057254509 2026-01-07T17:47:37.430586Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.430667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.440508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.530420Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.737253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.745709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.758154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:37.767952Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.898020Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:37.915958Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.031s,wait=0.008s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:37.944834Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:38.007424Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2342 1432 5183)b }, ecr=1.000 2026-01-07T17:47:38.086171Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.088132Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.088289Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.089678Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.089824Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.090412Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.091239Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.091367Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.091795Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:38.092456Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.092547Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.093814Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.094594Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.094689Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.095087Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:38.095763Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.095855Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.096255Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.096898Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.097342Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.097734Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:38.098443Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.098530Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.098901Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.099543Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.099967Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.100361Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:38.101074Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.101168Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.101575Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.102240Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:38.102786Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:38.103210Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:38.103863Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:47:38.103892Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:38.104078Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:38.104450Z n ... 
__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2026-01-07T17:47:38.522370Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 7 2026-01-07T17:47:38.522380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:38.522548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2026-01-07T17:47:38.522585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2026-01-07T17:47:38.522592Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2026-01-07T17:47:38.522601Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], version: 18446744073709551615 2026-01-07T17:47:38.522610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:38.522641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2026-01-07T17:47:38.522773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677794962478385 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-07T17:47:38.522786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2026-01-07T17:47:38.522874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677794962478385 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-07T17:47:38.522904Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2026-01-07T17:47:38.522972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7592677794962478385 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-07T17:47:38.523011Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, shard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523025Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523056Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715687:0 129 -> 240 2026-01-07T17:47:38.523363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-07T17:47:38.523521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-07T17:47:38.523541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-07T17:47:38.523580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523680Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:47:38.523923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:38.524026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-07T17:47:38.524038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-07T17:47:38.524054Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-07T17:47:38.524063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-07T17:47:38.524077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2026-01-07T17:47:38.524119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7592677794962478615:2382] message: TxId: 281474976715687 2026-01-07T17:47:38.524159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-07T17:47:38.524179Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715687:0 2026-01-07T17:47:38.524189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715687:0 2026-01-07T17:47:38.524252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:47:38.525330Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2026-01-07T17:47:38.525375Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-07T17:47:38.526700Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:38.526758Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:38.528362Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:38.528777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677794962478385 RawX2: 4503608217307432 } TabletId: 72075186224037890 State: 4 2026-01-07T17:47:38.528819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:38.529140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:38.529168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:38.529703Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:47:38.530643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:38.530848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:38.531063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:38.531083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:38.531122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:38.531420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:38.531442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:47:38.531474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:38.532264Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:47:38.532301Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7592677794962478501:2776], serverId# [2:7592677794962478502:2777], sessionId# [0:0:0] 2026-01-07T17:47:38.532737Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:38.533543Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:38.533607Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2026-01-07T17:47:33.623868Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677775133529568:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.624113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.071076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.071188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.085640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.102816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.114234Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677775133529454:2081] 1767808053618485 != 1767808053618488 2026-01-07T17:47:34.145265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.367392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.414280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:47:34.420795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.507467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.649060Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.750180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:34.781432Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677779428497712:2598] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-07T17:47:34.783846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:34.798314Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677779428497752:2632] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-07T17:47:37.313700Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677792088704363:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.314037Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.363184Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.531919Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.535308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.535390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.550450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.584008Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.787938Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.796304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.803358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.875715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:37.905157Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710660: 2026-01-07T17:47:37.905419Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7592677792088705274:2594] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:47:37.905509Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7592677792088705274:2594] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:47:37.905527Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7592677792088705274:2594] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-07T17:47:37.908214Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710661: 2026-01-07T17:47:37.908286Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7592677792088705282:2599] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-07T17:47:37.908333Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7592677792088705282:2599] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-07T17:47:37.908344Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7592677792088705282:2599] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-07T17:47:37.914865Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2026-01-07T17:47:33.604685Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677774925768501:2203];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.605128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.981621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:33.981722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.056865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.093179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.157685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.341934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.419939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.430349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.441265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.517179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.616133Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:37.433004Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677793624258523:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.433077Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.513939Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.626122Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.642876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.642947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.703082Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.781025Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.927283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.932761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.947363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2026-01-07T17:47:34.385781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677780370031361:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.385868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.674546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.737875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.737998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.848416Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.853163Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677780370031334:2081] 1767808054384313 != 1767808054384316 2026-01-07T17:47:34.862079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.964922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.163556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.193463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.268222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.402710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:38.086294Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677794865897553:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.096250Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:38.225128Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.249948Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.261093Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677794865897343:2081] 1767808058034966 != 1767808058034969 2026-01-07T17:47:38.263649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:38.263718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:38.289324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.487119Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.509945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.514468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:38.524032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:38.526642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2026-01-07T17:47:33.642126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677776117989361:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.642350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.679404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:34.004545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.361028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.402187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.402283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.449242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.514511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.661211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.854895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.914867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.009953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:35.010122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-07T17:47:35.010224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-07T17:47:35.010259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-07T17:47:35.010279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710658:0 type: TxMkDir target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-07T17:47:35.010338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:35.010523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:47:35.010563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:35.017799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 38, at schemeshard: 72057594046644480 2026-01-07T17:47:35.017961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2026-01-07T17:47:35.018133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:35.018153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:47:35.018274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:47:35.018357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:47:35.018378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677780412957301:2383], at schemeshard: 
72057594046644480, txId: 281474976710658, path id: 1 2026-01-07T17:47:35.018391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677780412957301:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 38 2026-01-07T17:47:35.018485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-07T17:47:35.018514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:47:35.018543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2026-01-07T17:47:35.035587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:47:35.043551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.043698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.043712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-01-07T17:47:35.043754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2026-01-07T17:47:35.043779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-07T17:47:35.044010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.044062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.044068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-01-07T17:47:35.044078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 2 2026-01-07T17:47:35.044087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:35.044124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2026-01-07T17:47:35.044211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.044226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2026-01-07T17:47:35.044249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.045671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2026-01-07T17:47:35.045777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:47:35.047965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.048033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-01-07T17:47:35.054110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808055090, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:35.054335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808055090 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:47:35.054428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 281474976710658:0 HandleReply TEvPrivate:: ... 
atorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2026-01-07T17:47:35.458351Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:20} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:47:35.458427Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:20} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{19, redo 124b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2026-01-07T17:47:35.458445Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:20} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:47:35.459314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.459351Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-07T17:47:35.459371Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:47:35.459411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.459420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2026-01-07T17:47:35.459432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2026-01-07T17:47:35.459442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 5 2026-01-07T17:47:35.459511Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{223, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-07T17:47:35.459533Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:47:35.459626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.459668Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-07T17:47:35.459683Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, 
NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:47:35.459718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.459725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2026-01-07T17:47:35.459737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 3 2026-01-07T17:47:35.459757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:35.459786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2026-01-07T17:47:35.459797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7592677784707925273:2291] 2026-01-07T17:47:35.459835Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{224, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-07T17:47:35.459852Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:47:35.462998Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463030Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:133:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463069Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:20:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463090Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:20:1:24576:109:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463127Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463154Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463182Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463195Z node 1 :TABLET_MAIN DEBUG: 
tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:123:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:47:35.463264Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2026-01-07T17:47:35.463350Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2026-01-07T17:47:35.463364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.463383Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2026-01-07T17:47:35.463390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-01-07T17:47:35.463426Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} commited cookie 1 for step 20 2026-01-07T17:47:35.463796Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:406: TClient[72057594046644480] received poison pill [1:7592677784707925274:2291] 2026-01-07T17:47:35.463821Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594046644480] notify reset [1:7592677784707925274:2291] 2026-01-07T17:47:35.464711Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7592677784707925274:2291] 2026-01-07T17:47:37.746281Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677792553744210:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.746461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.821091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.983701Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.001084Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677792553743964:2081] 1767808057674984 != 1767808057674987 2026-01-07T17:47:38.045009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.074761Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:38.074850Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:38.076335Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.250063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.261364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:38.273350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:38.283336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.733481Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:41.256356Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7592677809733614497:2802] txid# 281474976710700 FailProposedRequest: Transaction incoming read set size 1000087 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2026-01-07T17:47:41.256433Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7592677809733614497:2802] txid# 281474976710700 RESPONSE Status# ExecError marker# P13c |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2026-01-07T17:47:34.430648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677780213460246:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.430744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.809665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.936551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.936664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.938673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.005154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.009080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677780213460130:2081] 1767808054397133 != 1767808054397136 2026-01-07T17:47:35.069487Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.256137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.262209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.302747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.436026Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.507477Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677784508428340:2566] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-07T17:47:35.510048Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677784508428354:2573] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2026-01-07T17:47:35.515330Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677784508428360:2578] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-07T17:47:35.518930Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677784508428366:2583] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-07T17:47:38.250312Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677794997313166:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.295696Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:38.318808Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.425104Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677794997313092:2081] 1767808058197060 != 1767808058197063 2026-01-07T17:47:38.465629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:38.465706Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:38.466006Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.469894Z node 2 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.611113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.753858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.761051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:38.768832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:38.786267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2026-01-07T17:47:34.857589Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677777900775943:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.857683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.970478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:35.254866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.254936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.259778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.330484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.407772Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677777900775841:2081] 1767808054847910 != 1767808054847913 2026-01-07T17:47:35.409453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.565045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.673050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.716883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.779185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.883000Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.993337Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:36.021527Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.007s,wait=0.006s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:36.067929Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:36.079408Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:36.111782Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-07T17:47:36.260492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:36.260901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-07T17:47:36.265142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:36.265188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:36.265203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-07T17:47:36.265477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-07T17:47:36.265499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710680:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-07T17:47:36.265845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-07T17:47:36.265891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:36.266956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:47:36.267129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-01-07T17:47:36.267312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2026-01-07T17:47:36.267371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-07T17:47:36.269793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.269951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.270036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.270467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-07T17:47:36.270541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-07T17:47:36.270583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2026-01-07T17:47:36.270628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:36.270658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:36.270677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2026-01-07T17:47:36.270684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2026-01-07T17:47:36.273476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-07T17:47:36.273526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation. ... 
TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343742 RawX2: 4503608217307444 } TabletId: 72075186224037892 State: 4 2026-01-07T17:47:39.859585Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.859721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343394 RawX2: 4503608217307373 } TabletId: 72075186224037889 State: 4 2026-01-07T17:47:39.859739Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.860437Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:47:39.860644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.860664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.860756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.860763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.861175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343740 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2026-01-07T17:47:39.861197Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.861373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.861390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.861510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343926 RawX2: 4503608217307469 } TabletId: 72075186224037893 State: 4 2026-01-07T17:47:39.861548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.861678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343739 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 
2026-01-07T17:47:39.861694Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.861788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800973343928 RawX2: 4503608217307470 } TabletId: 72075186224037894 State: 4 2026-01-07T17:47:39.861801Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.861887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.861903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.862260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.862272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.862311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.862321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.865765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-07T17:47:39.865966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-07T17:47:39.866101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:47:39.866207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-07T17:47:39.866291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:39.866393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:39.866470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-07T17:47:39.866568Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:39.866643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:39.866739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:47:39.866815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-07T17:47:39.866908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:39.866993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:39.867005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:39.867038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:39.868501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-07T17:47:39.868515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-07T17:47:39.868541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:47:39.868548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:47:39.868562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:39.868569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:47:39.868583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-07T17:47:39.868592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-07T17:47:39.868607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:39.868613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:47:39.868626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-07T17:47:39.868638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-07T17:47:39.868661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:39.870966Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-07T17:47:39.870987Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:47:39.870998Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:47:39.871009Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-01-07T17:47:39.871020Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:39.871034Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2026-01-07T17:47:35.001270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677779452701387:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:35.001650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.328995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.401325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.401400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.485163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677779452701210:2081] 1767808054969811 != 1767808054969814 2026-01-07T17:47:35.488727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.496208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.500540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.783651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.793329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.806630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.934827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.032849Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:36.159641Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:36.168295Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:36.194905Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:36.218622Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.014s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-07T17:47:36.373184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:36.373466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:36.373839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:36.373864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-07T17:47:36.373889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:36.373916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, 
LocalPathId: 39] 2026-01-07T17:47:36.373937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:36.373947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:36.374049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-07T17:47:36.374529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:36.375124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:36.375147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-07T17:47:36.375680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-07T17:47:36.375838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-07T17:47:36.375976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:36.375991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:47:36.376097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-07T17:47:36.376151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:47:36.376162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677783747669034:2243], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 38 2026-01-07T17:47:36.376184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677783747669034:2243], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 40 2026-01-07T17:47:36.376207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:36.376235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-07T17:47:36.376512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.376640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.378087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-07T17:47:36.378174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-07T17:47:36.378252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:36.378279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:36.378287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:4, partId: 0 2026-01-07T17:47:36.379729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-07T17:47:36.380197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-07T17:47:36.380215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 720575940466 ... 
ard: 72057594046644480, message: Source { RawX1: 7592677800293740186 RawX2: 4503608217307374 } TabletId: 72075186224037888 State: 4 2026-01-07T17:47:39.722077Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.722195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800293740491 RawX2: 4503608217307434 } TabletId: 72075186224037891 State: 4 2026-01-07T17:47:39.722210Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.722288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800293740491 RawX2: 4503608217307434 } TabletId: 72075186224037891 State: 4 2026-01-07T17:47:39.722301Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.722661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.722692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.722733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.722742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.722768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.722775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.723300Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-07T17:47:39.723320Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:47:39.723329Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:47:39.723447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800293740490 RawX2: 4503608217307433 } TabletId: 72075186224037890 State: 4 2026-01-07T17:47:39.723476Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.723637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677800293740490 RawX2: 4503608217307433 } TabletId: 72075186224037890 State: 4 2026-01-07T17:47:39.723653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:39.723868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.723881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.723884Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:47:39.724113Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:47:39.724121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:39.724134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:39.724487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:39.724683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:39.724841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:39.724975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:39.725096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:39.725197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:39.725292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:39.725370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:39.725472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 
72057594046644480 2026-01-07T17:47:39.725494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:39.725537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:39.725553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:39.725567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:39.725708Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:47:39.725729Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:47:39.725740Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:47:39.726073Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:47:39.726141Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:47:39.727482Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:47:39.727540Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:47:39.727647Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:47:39.727660Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:47:39.727672Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:39.728654Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:39.728699Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:47:39.729577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T17:47:39.729602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-07T17:47:39.729629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:39.729634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:47:39.729650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:39.729663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:39.729673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 
tabletId 72075186224037890 2026-01-07T17:47:39.729687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:39.729711Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:39.948308Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:40.025315Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2026-01-07T17:47:40.029441Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2026-01-07T17:47:40.029777Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2026-01-07T17:47:40.030199Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2026-01-07T17:47:35.472414Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677785786557748:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:35.472458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.763956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.791492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.791572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.852735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.940229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.941118Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677785786557635:2081] 1767808055467278 != 1767808055467281 2026-01-07T17:47:35.981105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:36.469642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:36.469662Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:36.469756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:36.469846Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:36.478213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:37.197619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.209412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.233596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:37.322230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.558781Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677801949735788:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:39.558818Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:39.574806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.624776Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677801949735555:2081] 1767808059520762 != 1767808059520765 2026-01-07T17:47:39.658226Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:39.686324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:39.686408Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:39.690829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:39.799431Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:39.799456Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:39.799462Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:39.799529Z node 2 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:39.833776Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.988340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:39.994403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:40.011176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 16683, msgbus: 3805 2026-01-07T17:44:50.140173Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677076846039843:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.140265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.298973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.472908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.473039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.500613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.566225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.567215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.628533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.628562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.628571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.628668Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.819822Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677076846040058:2117] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.819942Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677076846040890:2751] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.820483Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677076846040890:2751] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.847764Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677076846040890:2751] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.861452Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677076846040890:2751] Handle TEvDescribeSchemeResult Forward to# [1:7592677076846040889:2750] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.863389Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076846040058:2117] Handle TEvProposeTransaction 2026-01-07T17:44:50.863429Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076846040058:2117] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:44:50.863496Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677076846040058:2117] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677076846040895:2756] 2026-01-07T17:44:51.018857Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677076846040895:2756] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { 
Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.018942Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677076846040895:2756] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.018959Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677076846040895:2756] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.019016Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677076846040895:2756] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.019444Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677076846040895:2756] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.019542Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677076846040895:2756] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.019637Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677076846040895:2756] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:44:51.019839Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677076846040895:2756] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:44:51.020295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.022619Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677076846040895:2756] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:44:51.022721Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677076846040895:2756] txid# 281474976715657 SEND to# [1:7592677076846040894:2755] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:44:51.034702Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076846040058:2117] Handle TEvProposeTransaction 2026-01-07T17:44:51.034729Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076846040058:2117] TxId# 281474976715658 ProcessProposeTransaction 2026-01-07T17:44:51.034763Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677076846040058:2117] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7592677081141008229:2791] 2026-01-07T17:44:51.037437Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677081141008229:2791] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.037490Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677081141008229:2791] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.037504Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677081141008229:2791] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.037564Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677081141008229:2791] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.037907Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677081141008229:2791] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.038053Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677081141008229:2791] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.038128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677081141008229:2791] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-07T17:44:51.038303Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677081141008229:2791] txid# 281474976715658 HANDLE EvClientConnected 2026-01-07T17:44:51.038810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.041362Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677081141008229:2791] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-07T17:44:51.041419Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677081141008229:2791] txid# 281474976715658 SEND to# [1:7592677081141008228:2790] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-07T17:44:51.062658Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076846040058:2117] Handle TEvProposeTransaction 2026-01-07T17:44:51.062689Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076846040058:2117] TxId# 281474976715659 ProcessProposeTransaction 2026-01-07T17:44:51.062740Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1 ... 
72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:41.488411Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677809254836545:2994] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:41.488599Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677809254836545:2994] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:41.493222Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677809254836545:2994] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-07T17:47:41.493390Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677809254836545:2994] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:41.493428Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677809254836545:2994] txid# 281474976710661 SEND to# [59:7592677809254836467:2336] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:41.515140Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677787779998673:2113] Handle TEvProposeTransaction 2026-01-07T17:47:41.515178Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677787779998673:2113] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:41.515231Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677787779998673:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677809254836569:3006] 2026-01-07T17:47:41.518328Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677809254836569:3006] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:50704" 2026-01-07T17:47:41.518416Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677809254836569:3006] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:41.518443Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677809254836569:3006] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:41.518500Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677809254836569:3006] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:41.518921Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677809254836569:3006] txid# 281474976710662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:41.519021Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677809254836569:3006] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:41.566379Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677809254836569:3006] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:41.566557Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677809254836569:3006] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:41.574593Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677809254836569:3006] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:41.574658Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677809254836569:3006] txid# 281474976710662 SEND to# [59:7592677809254836568:2326] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:41.588537Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677787779998673:2113] Handle TEvProposeTransaction 2026-01-07T17:47:41.588571Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677787779998673:2113] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:41.588622Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677787779998673:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677809254836585:3018] 2026-01-07T17:47:41.591609Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677809254836585:3018] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33814" 2026-01-07T17:47:41.591689Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677809254836585:3018] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:41.591711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677809254836585:3018] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:41.591764Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677809254836585:3018] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:41.592105Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677809254836585:3018] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:41.592198Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677809254836585:3018] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2026-01-07T17:47:41.592280Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677809254836585:3018] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-07T17:47:41.592425Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677809254836585:3018] txid# 281474976710663 HANDLE EvClientConnected 2026-01-07T17:47:41.592802Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:41.598405Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677809254836585:3018] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-07T17:47:41.598459Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677809254836585:3018] txid# 281474976710663 SEND to# [59:7592677809254836584:2342] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-07T17:47:41.670841Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677787779998673:2113] Handle TEvProposeTransaction 2026-01-07T17:47:41.670877Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677787779998673:2113] TxId# 281474976710664 ProcessProposeTransaction 2026-01-07T17:47:41.670915Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677787779998673:2113] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7592677809254836617:3032] 2026-01-07T17:47:41.673881Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677809254836617:3032] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:50748" 2026-01-07T17:47:41.673958Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677809254836617:3032] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:41.673982Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677809254836617:3032] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2026-01-07T17:47:41.674266Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677809254836617:3032] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:41.674308Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677809254836617:3032] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2026-01-07T17:47:41.674357Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677809254836617:3032] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:41.674635Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677809254836617:3032] txid# 281474976710664 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:41.674718Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677809254836617:3032] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:41.674809Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677809254836617:3032] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2026-01-07T17:47:41.675024Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677809254836617:3032] txid# 281474976710664 HANDLE EvClientConnected 2026-01-07T17:47:41.682636Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677809254836617:3032] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2026-01-07T17:47:41.682711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677809254836617:3032] txid# 281474976710664 SEND to# [59:7592677809254836615:2347] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TLocksTest::Range_IncorrectDot1 >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2026-01-07T17:47:34.746210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677780560046297:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.756207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.240619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.240716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.266252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.310494Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677780560046228:2081] 1767808054689247 != 1767808054689250 2026-01-07T17:47:35.323734Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.326987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.446337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.622040Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.646969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.666341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.755976Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.816192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.129056Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:36.130338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:47:36.130359Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:36.261422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:36.261650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-07T17:47:36.264461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:47:36.264523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:36.268156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:36.268207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710668:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-07T17:47:36.273223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 
281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-07T17:47:36.273286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:36.274013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:47:36.274120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-01-07T17:47:36.274284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2026-01-07T17:47:36.274321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-07T17:47:36.274623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.274776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:36.275268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2026-01-07T17:47:36.275337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-07T17:47:36.275380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:36.275396Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-07T17:47:36.275404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:36.278175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2026-01-07T17:47:36.278192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2026-01-07T17:47:36.278207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2026-01-07T17:47:36.279806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-07T17:47:36.279828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-07T17:47:36.279930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-07T17:47:36.279957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-07T17:47:36.280010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-07T17:47:36.280202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-07T17:47:36.280212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:36.280278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-07T17:47:36.280298Z node 1 :FLAT_ ... 
186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:40.385131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:40.385139Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-07T17:47:40.385152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:40.385264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:40.385525Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:47:40.385565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-07T17:47:40.385800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:40.385935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:40.385968Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:40.386016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:47:40.386085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:40.386144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:40.386233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:47:40.387000Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:40.387049Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:40.387070Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:47:40.387085Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:47:40.387105Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:47:40.387360Z node 2 :TX_DATASHARD INFO: 
datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:40.387466Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:47:40.388884Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:47:40.390326Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:40.389949Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-07T17:47:40.389979Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-07T17:47:40.389996Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-01-07T17:47:40.390012Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T17:47:40.390063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-07T17:47:40.390198Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:40.390244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:40.390406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:40.390424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:47:40.390455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:40.390461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:47:40.390478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:47:40.390484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:47:40.390499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T17:47:40.390506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-07T17:47:40.390508Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-07T17:47:40.390552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:40.390571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 
2026-01-07T17:47:40.390617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:40.390903Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:47:40.390944Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7592677801906394134:3030], serverId# [2:7592677801906394135:3031], sessionId# [0:0:0] 2026-01-07T17:47:40.391198Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:47:40.392196Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-07T17:47:40.392290Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:47:40.392319Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-01-07T17:47:40.393098Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-07T17:47:40.393157Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7592677806201362116:3570], serverId# [2:7592677806201362118:3572], sessionId# [0:0:0] 2026-01-07T17:47:40.392361Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:47:40.393303Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:47:40.393343Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:47:40.393386Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7592677806201362115:3569], serverId# [3:7592677803816906687:2344], sessionId# [0:0:0] 2026-01-07T17:47:40.393403Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:47:40.393422Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:47:40.393444Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7592677801906394152:3045], serverId# [3:7592677799521939191:2187], sessionId# [0:0:0] 2026-01-07T17:47:40.394622Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-07T17:47:40.393510Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:47:40.393578Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:47:40.394563Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-07T17:47:40.395116Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-07T17:47:40.395137Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-07T17:47:40.395138Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 
2026-01-07T17:47:40.395185Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-07T17:47:40.396247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-07T17:47:40.396268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-07T17:47:40.396324Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:40.396102Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:40.396162Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:47:40.397589Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:40.398697Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:47:40.399961Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:47:40.400023Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock2 >> TObjectStorageListingTest::Split >> TLocksTest::NoLocksSet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> TFlatTest::SplitEmptyAndWrite >> TLocksTest::Range_CorrectNullDot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TLocksTest::Range_IncorrectNullDot1 >> TFlatTest::LargeDatashardReplyDistributed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2026-01-07T17:47:33.612123Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677774150196238:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.613066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.124555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.124649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.246586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.249566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677774150196107:2081] 1767808053593299 != 1767808053593302 2026-01-07T17:47:34.249649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.280353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.433427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.635602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.668150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.681434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.690177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.830735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.099375Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.018s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:35.100422Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.014s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:35.167595Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.030s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:35.193515Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.039s,wait=0.005s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-07T17:47:35.383943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:35.384301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:35.384866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child 
name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:35.385126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-07T17:47:35.385169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:35.385189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:47:35.385228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:35.385240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:35.387343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-07T17:47:35.387493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:35.389361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:35.389421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-07T17:47:35.390098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-07T17:47:35.390337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-07T17:47:35.390557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:35.390569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:47:35.390682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-07T17:47:35.390750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:47:35.390780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677778445163940:2251], at schemeshard: 
72057594046644480, txId: 281474976710676, path id: 38 2026-01-07T17:47:35.390804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677778445163940:2251], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-01-07T17:47:35.390841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:35.390894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-07T17:47:35.391268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:35.391367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:35.392869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-07T17:47:35.393520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-07T17:47:35.393611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:35.393657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:35.393687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2026-01-07T17:47:35.395456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-07T17:47:35.395608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-07T17:47:35.395629Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 720575940466 ... t reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:42.424140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:42.424268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-07T17:47:42.424390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 41] was 1 2026-01-07T17:47:42.424500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-07T17:47:42.424607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:42.424705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:42.424806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677812356511158 RawX2: 4503612512274730 } TabletId: 72075186224037891 State: 4 2026-01-07T17:47:42.424830Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:42.425002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:42.425021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 41], at schemeshard: 72057594046644480 2026-01-07T17:47:42.425064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-07T17:47:42.425083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:42.425097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:42.425424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:47:42.425438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 
2026-01-07T17:47:42.425943Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T17:47:42.425953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-07T17:47:42.425973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-07T17:47:42.425979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-07T17:47:42.425993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-07T17:47:42.426021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:42.426037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:47:42.426073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:42.426094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:42.426228Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:42.426706Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:47:42.426741Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7592677808061543670:2592], serverId# [3:7592677808061543671:2593], sessionId# [0:0:0] 2026-01-07T17:47:42.426755Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:47:42.426772Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7592677808061543660:2585], serverId# [3:7592677808061543661:2586], sessionId# [0:0:0] 2026-01-07T17:47:42.426784Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037893 reason = ReasonStop 2026-01-07T17:47:42.426798Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [3:7592677812356511481:2947], serverId# [3:7592677812356511482:2948], sessionId# [0:0:0] 2026-01-07T17:47:42.426808Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:47:42.426823Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:47:42.426976Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-01-07T17:47:42.427171Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:42.427229Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:47:42.427238Z node 3 :HIVE WARN: hive_impl.cpp:519: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2026-01-07T17:47:42.427257Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-07T17:47:42.427408Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T17:47:42.428620Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037893 2026-01-07T17:47:42.428656Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 2026-01-07T17:47:42.429323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:42.429477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:42.429583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:42.429592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:42.429621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:42.430021Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:42.430062Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:47:42.430733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:42.430758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:47:42.430784Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:42.431429Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:47:42.431558Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:47:42.432487Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2026-01-07T17:47:42.432531Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:47:42.432565Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:7592677812356511229:2767], serverId# [3:7592677812356511233:2771], sessionId# [0:0:0] 2026-01-07T17:47:42.432834Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-07T17:47:42.433236Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 
72075186224037891 2026-01-07T17:47:42.433276Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:47:42.585188Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:42.717778Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-01-07T17:47:42.721548Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-01-07T17:47:42.722019Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-07T17:47:42.722549Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-07T17:47:42.723043Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-07T17:47:42.723612Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameKeyLock >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> TFlatTest::SelectBigRangePerf >> TFlatTest::SplitInvalidPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 22850, msgbus: 7931 2026-01-07T17:44:50.070861Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677074375848282:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.070922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.104238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:50.328171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.330060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.338771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.364430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.402656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:44:50.434728Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /dc-1 Strong=0 2026-01-07T17:44:50.438476Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /dc-1 Strong=0 2026-01-07T17:44:50.525893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.525915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.525923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.526015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.610518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.743112Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677074375848451:2136] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.743179Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677074375849267:2764] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.743641Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677074375849267:2764] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.774209Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677074375849267:2764] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.785422Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677074375849267:2764] Handle TEvDescribeSchemeResult Forward to# [1:7592677074375849266:2763] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 
MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.787388Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074375848451:2136] Handle TEvProposeTransaction 2026-01-07T17:44:50.787412Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074375848451:2136] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:50.787486Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074375848451:2136] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677074375849273:2770] 2026-01-07T17:44:50.928260Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677074375849273:2770] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.928340Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677074375849273:2770] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:50.928363Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677074375849273:2770] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.928415Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677074375849273:2770] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.928657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677074375849273:2770] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.928718Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677074375849273:2770] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:50.928776Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677074375849273:2770] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:50.928876Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677074375849273:2770] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:50.929696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:50.931908Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677074375849273:2770] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:50.931969Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677074375849273:2770] txid# 281474976710657 SEND to# [1:7592677074375849272:2769] Source {TEvProposeTransactionStatus txid# 
281474976710657 Status# 53} 2026-01-07T17:44:50.946761Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074375848451:2136] Handle TEvProposeTransaction 2026-01-07T17:44:50.946789Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074375848451:2136] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:50.946849Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074375848451:2136] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677074375849309:2803] 2026-01-07T17:44:50.949178Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677074375849309:2803] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.949228Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677074375849309:2803] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:50.949240Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677074375849309:2803] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.949284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677074375849309:2803] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.949503Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677074375849309:2803] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.949591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677074375849309:2803] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:50.949627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677074375849309:2803] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:50.949771Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677074375849309:2803] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:50.950252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:50.953401Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677074375849309:2803] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:50.953451Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677074375849309:2803] txid# 281474976710658 SEND to# [1:7592677 ... 
336] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2026-01-07T17:47:44.564548Z node 59 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7592677824577176071:2336], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:47:44.654623Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677807397305654:2114] Handle TEvProposeTransaction 2026-01-07T17:47:44.654648Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677807397305654:2114] TxId# 281474976710661 ProcessProposeTransaction 2026-01-07T17:47:44.654688Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677807397305654:2114] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7592677824577176142:2946] 2026-01-07T17:47:44.658156Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677824577176142:2946] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\317\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2026-01-07T17:47:44.658210Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677824577176142:2946] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:47:44.658235Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677824577176142:2946] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2026-01-07T17:47:44.658856Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7592677824577176142:2946] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:47:44.658951Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677824577176142:2946] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:44.659211Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677824577176142:2946] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:44.659291Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677824577176142:2946] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:44.659413Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677824577176142:2946] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:44.659563Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7592677824577176142:2946] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:44.662940Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677824577176142:2946] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-07T17:47:44.663072Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677824577176142:2946] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:44.663102Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677824577176142:2946] txid# 281474976710661 SEND to# [59:7592677824577176071:2336] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:44.688911Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677807397305654:2114] Handle TEvProposeTransaction 2026-01-07T17:47:44.688959Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677807397305654:2114] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:44.689020Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677807397305654:2114] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677824577176165:2957] 2026-01-07T17:47:44.692037Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677824577176165:2957] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:43010" 2026-01-07T17:47:44.692110Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677824577176165:2957] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:47:44.692135Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677824577176165:2957] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:44.692192Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677824577176165:2957] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:44.692534Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677824577176165:2957] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:44.692632Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677824577176165:2957] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:44.726996Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677824577176165:2957] txid# 
281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:44.727220Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677824577176165:2957] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:44.730530Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677824577176165:2957] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:44.730586Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677824577176165:2957] txid# 281474976710662 SEND to# [59:7592677824577176164:2328] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:44.801656Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677807397305654:2114] Handle TEvProposeTransaction 2026-01-07T17:47:44.801698Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677807397305654:2114] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:44.801762Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677807397305654:2114] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677824577176198:2972] 2026-01-07T17:47:44.805087Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677824577176198:2972] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:43042" 2026-01-07T17:47:44.805174Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677824577176198:2972] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:47:44.805201Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677824577176198:2972] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2026-01-07T17:47:44.805253Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677824577176198:2972] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:44.805669Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677824577176198:2972] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:44.805729Z node 59 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [59:7592677824577176198:2972] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2026-01-07T17:47:44.805823Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677824577176198:2972] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-07T17:47:44.805862Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677824577176198:2972] txid# 281474976710663 SEND to# [59:7592677824577176197:2345] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:44.808319Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=NmE2NmQ0N2MtYzdmOGNkZTEtN2MyMmU5Mi1lOWJiNWEwMA==, ActorId: [59:7592677824577176183:2345], ActorState: ExecuteState, LegacyTraceId: 
01kecs4q8b89ncsyqqk0aw6t5d, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:44.808701Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677807397305654:2114] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:44.808734Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677807397305654:2114] TxId# 281474976710664 ProcessProposeKqpTransaction 2026-01-07T17:47:45.116481Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677807397305584:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.116557Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> TCancelTx::ImmediateReadOnly [GOOD] >> TLocksFatTest::LocksLimit [GOOD] >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2026-01-07T17:47:41.231370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677810579633694:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.232047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.572648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.572668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.572787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.581950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.700499Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.802098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.960720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.966138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:42.065497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.233425Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:44.732711Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677821448033926:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.732903Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:44.760014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.844125Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:44.845227Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677821448033779:2081] 1767808064713418 != 1767808064713421 2026-01-07T17:47:44.879607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:44.879675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:44.881768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:44.976486Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.057685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:45.081182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2026-01-07T17:47:33.602152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677774678907050:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.602301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.025103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.025179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.029374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.053743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.211833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.278957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.465776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.473219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.534934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.640589Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.131503Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842571:2586] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:35.131580Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842571:2586] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 
2026-01-07T17:47:35.143424Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842584:2596] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:35.143504Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842584:2596] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:35.158814Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842597:2606] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:35.158871Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842597:2606] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:35.185166Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842623:2626] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:35.185249Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842623:2626] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:35.202625Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842636:2636] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:35.202684Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842636:2636] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:35.211211Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677783268842649:2646] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:35.211279Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677783268842649:2646] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:37.507563Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677792507000453:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.549512Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.633096Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.694148Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.699406Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677792507000428:2081] 1767808057500001 != 1767808057500004 2026-01-07T17:47:37.708220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.708293Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.733246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.829075Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.974215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.985313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.994964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.455239Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677811157286797:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.455291Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.477169Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.557707Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.561097Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677811157286769:2081] 1767808061454179 != 1767808061454182 2026-01-07T17:47:41.578299Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.578385Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.584159Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.665063Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.808089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.815116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:41.818313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.154531Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255011:2580] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:42.154623Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255011:2580] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:42.170519Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255027:2593] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:42.170591Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255027:2593] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:42.182398Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255041:2604] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-07T17:47:42.182469Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255041:2604] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:42.212182Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255067:2624] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:42.212249Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255067:2624] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:42.228540Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255080:2634] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:42.228625Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255080:2634] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:42.242074Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7592677815452255094:2645] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-07T17:47:42.242143Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7592677815452255094:2645] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:45.229566Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677828513976704:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.229616Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.239877Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:45.337687Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:45.341349Z node 4 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677828513976677:2081] 1767808065228326 != 1767808065228329 2026-01-07T17:47:45.370236Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.370320Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.378566Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.379107Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.596737Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:45.604408Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.889984Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-07T17:47:45.891733Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7592677828513977622:2579] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-07T17:47:45.905616Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-07T17:47:45.905850Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7592677828513977636:2587] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2026-01-07T17:47:33.602571Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677777146390457:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.602626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.983729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:33.983807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:47:34.031781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.057147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.081063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677777146390418:2081] 1767808053592241 != 1767808053592244 2026-01-07T17:47:34.093481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.255534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.419168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.461100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.604557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.611459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.791541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.852580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.605886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677777146390457:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.605987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:42.006204Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677807870919675:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:42.008354Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:42.047776Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.127993Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:42.133314Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677807870919619:2081] 1767808061989917 != 1767808061989920 2026-01-07T17:47:42.162886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:42.162969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:42.166610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:42.258519Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.361981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:42.368236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:42.382138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.438619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.483292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.366144Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677827237885264:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.366218Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.391833Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.470269Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:45.503047Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.520818Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.528531Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.645391Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.705846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:45.725097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.795241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.844522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> DataShardWrite::ReplaceImmediate |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 3224, msgbus: 12114 2026-01-07T17:44:50.152573Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677073555720284:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.155004Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.447539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.481257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.481343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.532852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.580201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.625550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.676822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.676844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.676856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.676969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.891252Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677073555720446:2142] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.891345Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677073555721243:2751] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.891766Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677073555721243:2751] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.928489Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677073555721243:2751] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.934400Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677073555721243:2751] Handle TEvDescribeSchemeResult Forward to# [1:7592677073555721242:2750] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { 
SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.936397Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073555720446:2142] Handle TEvProposeTransaction 2026-01-07T17:44:50.936432Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073555720446:2142] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:50.936495Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073555720446:2142] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677073555721248:2756] 2026-01-07T17:44:51.050207Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677073555721248:2756] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.050286Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677073555721248:2756] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.050303Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677073555721248:2756] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.050359Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677073555721248:2756] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.050799Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677073555721248:2756] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.050892Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677073555721248:2756] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.050988Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677073555721248:2756] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:51.051126Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677073555721248:2756] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:51.051716Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.053587Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677073555721248:2756] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:51.053689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677073555721248:2756] txid# 281474976710657 SEND to# [1:7592677073555721247:2755] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:44:51.064094Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073555720446:2142] Handle TEvProposeTransaction 2026-01-07T17:44:51.064119Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073555720446:2142] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:51.064147Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073555720446:2142] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677077850688582:2791] 2026-01-07T17:44:51.066227Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677077850688582:2791] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.066277Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077850688582:2791] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.066291Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077850688582:2791] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.066353Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077850688582:2791] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.066600Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077850688582:2791] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.066721Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077850688582:2791] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.066797Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077850688582:2791] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:51.066928Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077850688582:2791] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:51.067382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.071137Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077850688582:2791] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:51.071188Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077850688582:2791] txid# 281474976710658 SEND to# [1:7592677077850688581:2790] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-07T17:44:51.103606Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073555720446:2142] Handle TEvProposeTransaction 2026-01-07T17:44:51.103634Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073555720446:2142] TxId# 281474976710659 ProcessProposeTransaction 2026-01-07T17:44:51.103674Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1 ... orkload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:48.018551Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839228231120:2898] txid# 281474976710660 SEND to# [59:7592677834933263752:2334] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2026-01-07T17:47:48.041409Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677817753393516:2116] Handle TEvProposeTransaction 2026-01-07T17:47:48.041454Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677817753393516:2116] TxId# 281474976710661 ProcessProposeTransaction 2026-01-07T17:47:48.041506Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677817753393516:2116] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7592677839228231144:2910] 2026-01-07T17:47:48.044499Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677839228231144:2910] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:55666" 2026-01-07T17:47:48.044581Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839228231144:2910] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.044604Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839228231144:2910] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:48.044658Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839228231144:2910] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.045121Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677839228231144:2910] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.045220Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677839228231144:2910] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# 
[OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:48.091603Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677839228231144:2910] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:48.091786Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677839228231144:2910] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:48.095071Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677839228231144:2910] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-07T17:47:48.095150Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839228231144:2910] txid# 281474976710661 SEND to# [59:7592677839228231143:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:48.188062Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677817753393516:2116] Handle TEvProposeTransaction 2026-01-07T17:47:48.188088Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677817753393516:2116] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:48.188131Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677817753393516:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677839228231168:2928] 2026-01-07T17:47:48.191388Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677839228231168:2928] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43646" 2026-01-07T17:47:48.191466Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839228231168:2928] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.191487Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839228231168:2928] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:48.191540Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839228231168:2928] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.191899Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677839228231168:2928] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.192024Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677839228231168:2928] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:48.192091Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677839228231168:2928] txid# 281474976710662 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:48.192614Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677839228231168:2928] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:48.193339Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:48.195747Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677839228231168:2928] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:48.195799Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839228231168:2928] txid# 281474976710662 SEND to# [59:7592677839228231167:2341] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:48.273146Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677817753393516:2116] Handle TEvProposeTransaction 2026-01-07T17:47:48.273179Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677817753393516:2116] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:48.273223Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677817753393516:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677839228231255:2998] 2026-01-07T17:47:48.276050Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677839228231255:2998] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI2OCwiaWF0IjoxNzY3ODA4MDY4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.FZrLVPUymBYVBqqcp34I639rzWUSmLo5UxgPbZeIGo5PPEso_Pvjbs3REf__sIPJL0TqlCyTlNtE3FrEy-jshY35K7TH-GfoGI80AqhWOSDODNcEvTile_9BrVi6y92AqtBtIZrtdmdXHFplnHVGXKk2_ueseahkUp7L240EgNqfXQODADE6M3i-KAYj3XhtIgqMUIByfugdT6ASyZl8c0Xh7VGKseIFd7i9mI_OhRUaUs8aa9csQm_VN5wTsWw_JHvKfrXTsZsHkdxWIZhyKOWTPvm_Uz9WjCYK43-dXlaAYW8WS-MZUbHq8PG0eYCNTOgDYkLF_sqMUt6WVJgaEA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI2OCwiaWF0IjoxNzY3ODA4MDY4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43680" 2026-01-07T17:47:48.276127Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839228231255:2998] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.276148Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839228231255:2998] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-07T17:47:48.276301Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677839228231255:2998] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:48.276345Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677839228231255:2998] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 
CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:48.276388Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839228231255:2998] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.276664Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677839228231255:2998] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.276697Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677839228231255:2998] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2026-01-07T17:47:48.276785Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677839228231255:2998] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-07T17:47:48.276820Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839228231255:2998] txid# 281474976710663 SEND to# [59:7592677839228231254:2346] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:48.277410Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=ZDNhNDU5NDgtOTE2M2FmNmItNWExNzZkYi1lNjM5YTI4YQ==, ActorId: [59:7592677839228231240:2346], ActorState: ExecuteState, LegacyTraceId: 01kecs4tmt5r5gev23z53aqjxr, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:48.277635Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677817753393516:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:48.277657Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677817753393516:2116] TxId# 281474976710664 ProcessProposeKqpTransaction 2026-01-07T17:47:48.420533Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677817753393326:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:48.420615Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |93.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::UpsertImmediate >> DataShardWrite::AsyncIndexKeySizeConstraint >> TFlatTest::LargeDatashardReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 8838, msgbus: 1455 2026-01-07T17:44:50.039479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677076324443223:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.040016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.075551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:50.348005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.348115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.385339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.455273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.459663Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.540724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.540755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.540770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.540866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.731423Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677076324443464:2123] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.731500Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677076324444300:2769] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.731989Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677076324444300:2769] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.780881Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677076324444300:2769] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.792679Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677076324444300:2769] Handle TEvDescribeSchemeResult Forward to# [1:7592677076324444299:2768] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 
0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.794504Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076324443464:2123] Handle TEvProposeTransaction 2026-01-07T17:44:50.794540Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076324443464:2123] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:44:50.794616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677076324443464:2123] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677076324444305:2774] 2026-01-07T17:44:50.891549Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677076324444305:2774] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.891654Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677076324444305:2774] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:50.891681Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677076324444305:2774] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.891735Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677076324444305:2774] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.892093Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677076324444305:2774] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.892250Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677076324444305:2774] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:50.892342Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677076324444305:2774] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:44:50.892499Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677076324444305:2774] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:44:50.893140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:44:50.895538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677076324444305:2774] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:44:50.895624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677076324444305:2774] txid# 281474976715657 SEND to# [1:7592677076324444304:2773] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:44:50.913786Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076324443464:2123] Handle TEvProposeTransaction 2026-01-07T17:44:50.913812Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076324443464:2123] TxId# 281474976715658 ProcessProposeTransaction 2026-01-07T17:44:50.913847Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677076324443464:2123] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7592677076324444341:2807] 2026-01-07T17:44:50.916776Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677076324444341:2807] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.916840Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677076324444341:2807] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:50.916886Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677076324444341:2807] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.916967Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677076324444341:2807] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.917422Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677076324444341:2807] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.917560Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677076324444341:2807] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:50.917643Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677076324444341:2807] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-07T17:44:50.917817Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677076324444341:2807] txid# 281474976715658 HANDLE EvClientConnected 2026-01-07T17:44:50.918310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:50.921092Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677076324444341:2807] txid# 
281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-07T17:44:50.921139Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677076324444341:2807] txid# 281474976715658 SEND to# [1:7592677076324444340:2806] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-07T17:44:50.948544Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677076324443464:2123] Handle TEvProposeTransaction 2026-01-07T17:44:50.948583Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677076324443464:2123] TxId# 281474976715659 ProcessProposeTransaction 2026-01-07T17:44:50.948627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677076324443464:2 ... BUG: schemereq.cpp:1822: Actor# [59:7592677839167718807:2980] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2026-01-07T17:47:48.769092Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839167718807:2980] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.769109Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839167718807:2980] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2026-01-07T17:47:48.769290Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677839167718807:2980] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:48.769321Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677839167718807:2980] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:48.770043Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7592677839167718807:2980] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:47:48.770143Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839167718807:2980] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.770281Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677839167718807:2980] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.770352Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677839167718807:2980] HANDLE EvNavigateKeySetResult, txid# 281474976710661 
shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:48.770461Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677839167718807:2980] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:48.770574Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677839167718807:2980] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:48.774202Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677839167718807:2980] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-07T17:47:48.774307Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677839167718807:2980] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:48.774334Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839167718807:2980] txid# 281474976710661 SEND to# [59:7592677839167718736:2336] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:48.791664Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677821987848346:2116] Handle TEvProposeTransaction 2026-01-07T17:47:48.791697Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677821987848346:2116] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:48.791743Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677821987848346:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677839167718831:2992] 2026-01-07T17:47:48.794197Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677839167718831:2992] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36902" 2026-01-07T17:47:48.794253Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839167718831:2992] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.794268Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839167718831:2992] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:48.794301Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839167718831:2992] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.794608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677839167718831:2992] txid# 281474976710662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.794710Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677839167718831:2992] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:48.845950Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677839167718831:2992] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:48.846147Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677839167718831:2992] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:48.850339Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677839167718831:2992] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:48.850406Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839167718831:2992] txid# 281474976710662 SEND to# [59:7592677839167718830:2328] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:48.911570Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677821987848346:2116] Handle TEvProposeTransaction 2026-01-07T17:47:48.911603Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677821987848346:2116] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:48.911649Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677821987848346:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677839167718864:3007] 2026-01-07T17:47:48.914684Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677839167718864:3007] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36930" 2026-01-07T17:47:48.914771Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677839167718864:3007] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:48.914794Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677839167718864:3007] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-01-07T17:47:48.915053Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677839167718864:3007] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:48.915115Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677839167718864:3007] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:48.915170Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677839167718864:3007] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:48.915531Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1631: Actor# [59:7592677839167718864:3007] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:48.915568Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677839167718864:3007] txid# 281474976710663, Access denied for ordinaryuser@builtin, attempt to manage user 2026-01-07T17:47:48.915674Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677839167718864:3007] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-01-07T17:47:48.915705Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677839167718864:3007] txid# 281474976710663 SEND to# [59:7592677839167718863:2345] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:48.916146Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=MzFmZWNlNzItZWU4YWZmOTYtZTQ4Mzc1MjYtYWZhZjUzMWQ=, ActorId: [59:7592677839167718849:2345], ActorState: ExecuteState, LegacyTraceId: 01kecs4v8t5ht4dvdbx23zzmqj, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:48.916448Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677821987848346:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:48.916466Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677821987848346:2116] TxId# 281474976710664 ProcessProposeKqpTransaction 2026-01-07T17:47:49.218173Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677821987848316:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.218261Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2026-01-07T17:47:34.909574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677777659568406:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.910545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.947822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:35.292464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.292558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.293001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-07T17:47:35.346902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.450812Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.530494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.664542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.670466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.713364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.929611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.950340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.032952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.119104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.228712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.313941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.414953Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.517039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.606300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.698749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.824706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.907998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.023204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.122393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.224287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.348805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.454123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.560139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.695434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.811084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.915710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.057244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.186690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.293792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.420403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.524491Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself ... Result from SS at 72075186224037894 2026-01-07T17:47:47.454935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710686 ready parts: 1/1 2026-01-07T17:47:47.454951Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710686:0 2026-01-07T17:47:47.454959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710686:0 2026-01-07T17:47:47.455070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 4 2026-01-07T17:47:47.461915Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:7592677835498765038:3222], serverId# [2:7592677835498765039:3223], sessionId# [0:0:0] 2026-01-07T17:47:47.462050Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.463506Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.463555Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.468064Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037895, clientId# [2:7592677835498765048:3229], serverId# [2:7592677835498765049:3230], sessionId# [0:0:0] 2026-01-07T17:47:47.468183Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.469597Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.469647Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.472870Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.474241Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.474294Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.480742Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.485169Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.485264Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.489057Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.490677Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.490735Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.497538Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.497872Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:47.499063Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:47.499097Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:47.500368Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.500417Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.503017Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:47.503766Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.504880Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:47.504942Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:47.505507Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.505579Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.508493Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.509766Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.509816Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.512642Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.513766Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.513811Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.516770Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.517956Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.518000Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.520704Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 
2026-01-07T17:47:47.521990Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.522037Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.524876Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:47.525610Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:47.525632Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:47.530115Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.532209Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.532265Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.534701Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:47.535123Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:47.535147Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:47.538100Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.541735Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.541804Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.545296Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.546966Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.547016Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-07T17:47:47.554161Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-07T17:47:47.558356Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-07T17:47:47.558424Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-07T17:47:47.567569Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-07T17:47:47.569615Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-07T17:47:47.569694Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2026-01-07T17:47:47.573527Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-01-07T17:47:47.575576Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-01-07T17:47:47.576298Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-07T17:47:47.576739Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-07T17:47:47.577203Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-07T17:47:47.577891Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> TObjectStorageListingTest::SuffixColumns [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> TLocksTest::CK_GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2026-01-07T17:47:33.629276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677777516815868:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.629355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.925035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.017325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.017443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.071784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.185201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:47:34.215125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.440953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.451333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.555225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.631219Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.726484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.783106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.974057Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677798020495540:2201];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.974298Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:39.042629Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.170257Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:39.193116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:39.193183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:39.195277Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:39.216994Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.398049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:39.405786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:39.415921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:39.424398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.484635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.527171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.980525Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:43.354416Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677819339802194:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:43.354467Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:43.405087Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:43.446506Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:43.504405Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:43.504482Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:43.510558Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:43.644457Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:43.697613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:43.705150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:43.724889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:43.804246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:43.864431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.986851Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677831637220930:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.986901Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:47.062538Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.127501Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:47.127578Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:47.141132Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677831637220906:2081] 1767808066984301 != 1767808066984304 2026-01-07T17:47:47.154561Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:47.165424Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.281220Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.357428Z node 4 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.362588Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:47.373684Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:47.378939Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.438744Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.493760Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2026-01-07T17:47:37.140115Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677791878053939:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.145227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.464249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.464325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.535555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.610803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.620529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.678579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715682 2026-01-07T17:47:37.678630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715681 2026-01-07T17:47:37.678662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715680 2026-01-07T17:47:37.678690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715675 2026-01-07T17:47:37.678709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715679 2026-01-07T17:47:37.678742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2026-01-07T17:47:37.678778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2026-01-07T17:47:37.678840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2026-01-07T17:47:37.678866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715677 2026-01-07T17:47:37.678885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715672 2026-01-07T17:47:37.678902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715676 2026-01-07T17:47:37.678909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/tables' 2026-01-07T17:47:37.678926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715671 2026-01-07T17:47:37.678935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/pg_class' 2026-01-07T17:47:37.678952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_duration_one_minute' 2026-01-07T17:47:37.678953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715670 2026-01-07T17:47:37.678966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_partitions_by_tli_one_minute' 2026-01-07T17:47:37.678971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715669 2026-01-07T17:47:37.678979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/query_sessions' 2026-01-07T17:47:37.678989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2026-01-07T17:47:37.678993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_storage_pools' 2026-01-07T17:47:37.679006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715668 2026-01-07T17:47:37.679024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/auth_owners' 2026-01-07T17:47:37.679037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715663 2026-01-07T17:47:37.679041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/hive_tablets' 2026-01-07T17:47:37.679053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/compile_cache_queries' 2026-01-07T17:47:37.679070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715667 2026-01-07T17:47:37.679078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/streaming_queries' 2026-01-07T17:47:37.679096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/resource_pool_classifiers' 2026-01-07T17:47:37.679097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2026-01-07T17:47:37.679109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_read_bytes_one_minute' 2026-01-07T17:47:37.679121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 
2026-01-07T17:47:37.679159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-07T17:47:37.679169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_read_bytes_one_hour' 2026-01-07T17:47:37.679186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2026-01-07T17:47:37.679195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_cpu_time_one_hour' 2026-01-07T17:47:37.679213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_partitions_one_minute' 2026-01-07T17:47:37.679231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/nodes' 2026-01-07T17:47:37.679248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/auth_group_members' 2026-01-07T17:47:37.679276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_partitions_by_tli_one_hour' 2026-01-07T17:47:37.679297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_pdisks' 2026-01-07T17:47:37.679313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_vslots' 2026-01-07T17:47:37.679353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/pg_tables' 2026-01-07T17:47:37.679377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7592677791878054343:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_request_units_one_hour' 2026-01-07T17:47:37.679463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:37.679482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_bo ... 
on DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7592677796173022896:2368] message: TxId: 281474976710674 2026-01-07T17:47:38.825515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-01-07T17:47:38.825534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710674:0 2026-01-07T17:47:38.825553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710674:0 2026-01-07T17:47:38.825616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 2 2026-01-07T17:47:38.826487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710674 datashard 72075186224037899 state PreOffline 2026-01-07T17:47:38.826526Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2026-01-07T17:47:38.827058Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:38.827120Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:38.829738Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:38.831793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677796173022467 RawX2: 4503603922340115 } TabletId: 72075186224037899 State: 4 2026-01-07T17:47:38.831882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:38.832239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:38.832273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:38.832578Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 0 TabletID: 72075186224037899 2026-01-07T17:47:38.832599Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2026-01-07T17:47:38.832669Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2026-01-07T17:47:38.832759Z node 1 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2026-01-07T17:47:38.832863Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 
12 2026-01-07T17:47:38.833647Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7592677791878053866:2101] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7592677791878054173:2152] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-07T17:47:38.833809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2026-01-07T17:47:38.834028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 1 2026-01-07T17:47:38.834209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:38.834221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 42], at schemeshard: 72057594046644480 2026-01-07T17:47:38.834263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:38.834635Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2026-01-07T17:47:38.834662Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2026-01-07T17:47:38.834716Z node 1 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2026-01-07T17:47:38.835041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:12 2026-01-07T17:47:38.835072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2026-01-07T17:47:38.835102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:38.835223Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2026-01-07T17:47:38.835355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2026-01-07T17:47:38.835382Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037899 reason = ReasonStop 2026-01-07T17:47:38.835665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7592677796173022632:2961], serverId# [1:7592677796173022633:2962], sessionId# [0:0:0] 2026-01-07T17:47:38.836004Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037899 2026-01-07T17:47:38.836079Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037899 
2026-01-07T17:47:38.837551Z node 1 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2026-01-07T17:47:38.837587Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-07T17:47:38.837813Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2026-01-07T17:47:40.762083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677804365497685:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:40.762126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:40.789087Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:40.874119Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:40.881052Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677804365497660:2081] 1767808060761090 != 1767808060761093 2026-01-07T17:47:40.888278Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:40.888353Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:40.908446Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.033023Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.095178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.099687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:41.111665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.769831Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:45.762538Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677804365497685:2066];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:47:45.762611Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:50.826959Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-07T17:47:50.838084Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976711360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-07T17:47:50.839205Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976711360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-07T17:47:50.856887Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7592677847315177252:6107] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TFlatTest::SplitBoundaryRead [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> TObjectStorageListingTest::TestFilter >> TFlatTest::SelectRangeForbidNullArgs2 >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 11052, msgbus: 30215 2026-01-07T17:44:49.913903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677071433384935:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:49.914582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.129382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.151909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.151995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.169911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.208031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:44:50.273098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.273129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.273138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.273243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.372277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.493235Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677071433385145:2117] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.493311Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677075728353263:2740] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.493727Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677075728353263:2740] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.524628Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677075728353263:2740] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.531251Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677075728353263:2740] Handle TEvDescribeSchemeResult Forward to# [1:7592677075728353262:2739] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.534838Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677071433385145:2117] 
Handle TEvProposeTransaction 2026-01-07T17:44:50.534892Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677071433385145:2117] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:50.534964Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677071433385145:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677075728353268:2745] 2026-01-07T17:44:50.664335Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677075728353268:2745] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.664445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677075728353268:2745] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:44:50.664478Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677075728353268:2745] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.664526Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677075728353268:2745] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.664833Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677075728353268:2745] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.664952Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677075728353268:2745] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:50.665047Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677075728353268:2745] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:50.665250Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677075728353268:2745] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:50.665737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:50.670611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677075728353268:2745] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:50.670705Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677075728353268:2745] txid# 281474976710657 SEND to# [1:7592677075728353267:2744] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:44:50.689687Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677071433385145:2117] Handle TEvProposeTransaction 2026-01-07T17:44:50.689711Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: 
actor# [1:7592677071433385145:2117] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:50.689748Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677071433385145:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677075728353304:2778] 2026-01-07T17:44:50.692283Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677075728353304:2778] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:50.692345Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677075728353304:2778] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:44:50.692359Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677075728353304:2778] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:50.692426Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677075728353304:2778] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:50.692724Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677075728353304:2778] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.692841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677075728353304:2778] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:50.692899Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677075728353304:2778] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:50.693038Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677075728353304:2778] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:50.693455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:50.696180Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677075728353304:2778] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:50.696214Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677075728353304:2778] txid# 281474976710658 SEND to# [1:7592677075728353303:2777] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-07T17:44:50.923411Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:52.619030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677084318288045:2331], 
DatabaseId: /dc-1, PoolId: default, Failed ... quired# true 2026-01-07T17:47:50.796361Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677847286663032:2938] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:50.796548Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677847286663032:2938] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:50.801956Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677847286663032:2938] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-07T17:47:50.802023Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677847286663032:2938] txid# 281474976710661 SEND to# [59:7592677847286663031:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:50.845439Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830106792500:2111] Handle TEvProposeTransaction 2026-01-07T17:47:50.845484Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830106792500:2111] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:50.845533Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830106792500:2111] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677847286663056:2956] 2026-01-07T17:47:50.848464Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677847286663056:2956] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42600" 2026-01-07T17:47:50.848543Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677847286663056:2956] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:50.848564Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677847286663056:2956] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:50.848617Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677847286663056:2956] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:50.849086Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677847286663056:2956] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:50.849237Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677847286663056:2956] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:50.849312Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677847286663056:2956] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:50.849501Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1533: Actor# [59:7592677847286663056:2956] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:50.850231Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:50.855153Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677847286663056:2956] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:50.855222Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677847286663056:2956] txid# 281474976710662 SEND to# [59:7592677847286663055:2341] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:50.913146Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830106792500:2111] Handle TEvProposeTransaction 2026-01-07T17:47:50.913178Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830106792500:2111] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:50.913225Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830106792500:2111] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677847286663141:3027] 2026-01-07T17:47:50.917785Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677847286663141:3027] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:42994" 2026-01-07T17:47:50.917870Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677847286663141:3027] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:50.917894Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677847286663141:3027] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:50.917949Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677847286663141:3027] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:50.918330Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677847286663141:3027] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:50.918414Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677847286663141:3027] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:50.964811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677847286663141:3027] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-07T17:47:50.965016Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7592677847286663141:3027] txid# 281474976710663 HANDLE EvClientConnected 2026-01-07T17:47:50.968816Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677847286663141:3027] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-07T17:47:50.968873Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677847286663141:3027] txid# 281474976710663 SEND to# [59:7592677847286663140:2343] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-07T17:47:51.043681Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830106792500:2111] Handle TEvProposeTransaction 2026-01-07T17:47:51.043705Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830106792500:2111] TxId# 281474976710664 ProcessProposeTransaction 2026-01-07T17:47:51.043740Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830106792500:2111] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7592677851581630469:3044] 2026-01-07T17:47:51.046018Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677851581630469:3044] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MCwiaWF0IjoxNzY3ODA4MDcwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.f4_rluLz5VI2kPXjA1puYquXBpJSJcn7jskhdaXvD-lu-Ofh7lqVM9IfSFsPHDuuDa4Fob-5EvRqeG4t-l1he3DH3JYm9p7cTCiLIFYK70Dlvnw1VovEjbdQTIN3XUQbqHv1zLmrD5js_ZrCvzYHXR99fqG8WdjTfvh0RBvbrg-3il0CaxinjV3orUI_VlWa2kY5m6emkmjA2BfKsrDjXBE4o5VdLeKf6h8Nz_SzjCQGqB68coDYExW0XKhvlfA1yEspzIf-VmVQvul5rKzJZWwyVeSuoFI6ylz8dJ-tcVUcFuDPdxeLdEsCU5hQKxYvhKA3NvM7zKn8NGtrg21S1w\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MCwiaWF0IjoxNzY3ODA4MDcwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42646" 2026-01-07T17:47:51.046097Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677851581630469:3044] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:51.046117Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677851581630469:3044] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-07T17:47:51.046283Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677851581630469:3044] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:51.046344Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677851581630469:3044] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:51.046394Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677851581630469:3044] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:51.046669Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677851581630469:3044] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:51.046700Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# 
[59:7592677851581630469:3044] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2026-01-07T17:47:51.046785Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677851581630469:3044] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-07T17:47:51.046817Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677851581630469:3044] txid# 281474976710664 SEND to# [59:7592677851581630468:2354] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:51.047194Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=YzY5NThhYjgtOTIyMmM5NGQtZDVjZmFiY2UtNjQ4ZjU3YTQ=, ActorId: [59:7592677851581630459:2354], ActorState: ExecuteState, LegacyTraceId: 01kecs4xbh2h8aq7q43mdyz0cp, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:51.047457Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677830106792500:2111] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:51.047471Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677830106792500:2111] TxId# 281474976710665 ProcessProposeKqpTransaction |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 14744, msgbus: 23847 2026-01-07T17:44:50.282938Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005978s 2026-01-07T17:44:50.338877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677073396698891:2243];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.338943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.624669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.656128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.656272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.694336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.729205Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.825417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.825436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.825441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2026-01-07T17:44:50.825553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.904589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.997392Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677073396698972:2143] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.997463Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677073396699746:2731] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.997882Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677073396699746:2731] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.030261Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677073396699746:2731] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:51.036314Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677073396699746:2731] Handle TEvDescribeSchemeResult Forward to# [1:7592677073396699745:2730] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:51.037798Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073396698972:2143] Handle TEvProposeTransaction 2026-01-07T17:44:51.037828Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073396698972:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:44:51.037906Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073396698972:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677077691667048:2737] 
2026-01-07T17:44:51.158133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677077691667048:2737] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.158223Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077691667048:2737] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.158252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077691667048:2737] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.158314Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077691667048:2737] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.158624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077691667048:2737] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.158725Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077691667048:2737] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.158819Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077691667048:2737] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:44:51.158999Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077691667048:2737] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:44:51.159575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.161590Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077691667048:2737] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:44:51.161690Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077691667048:2737] txid# 281474976715657 SEND to# [1:7592677077691667047:2736] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:44:51.171343Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073396698972:2143] Handle TEvProposeTransaction 2026-01-07T17:44:51.171371Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073396698972:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-01-07T17:44:51.171401Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073396698972:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7592677077691667084:2770] 2026-01-07T17:44:51.174105Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# 
[1:7592677077691667084:2770] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.174156Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077691667084:2770] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.174180Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077691667084:2770] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.174233Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077691667084:2770] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.174547Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077691667084:2770] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.174665Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077691667084:2770] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.174753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077691667084:2770] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-07T17:44:51.174898Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077691667084:2770] txid# 281474976715658 HANDLE EvClientConnected 2026-01-07T17:44:51.175431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.177931Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077691667084:2770] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-07T17:44:51.177968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077691667084:2770] txid# 281474976715658 SEND to# [1:7592677077691667083:2769] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-07T17:44:51.199494Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073396698972:2143] Handle TEvProposeTransaction 2026-01-07T17:44:51.199529Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073396698972:2143] ... 
orkload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:51.519849Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677851999210853:2918] txid# 281474976710660 SEND to# [59:7592677851999210776:2335] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2026-01-07T17:47:51.545491Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830524373133:2116] Handle TEvProposeTransaction 2026-01-07T17:47:51.545533Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830524373133:2116] TxId# 281474976710661 ProcessProposeTransaction 2026-01-07T17:47:51.545601Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830524373133:2116] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7592677851999210877:2930] 2026-01-07T17:47:51.548547Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677851999210877:2930] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:59420" 2026-01-07T17:47:51.548629Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677851999210877:2930] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:51.548668Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677851999210877:2930] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:51.548732Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677851999210877:2930] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:51.549189Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677851999210877:2930] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:51.549277Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677851999210877:2930] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:51.594805Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677851999210877:2930] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-07T17:47:51.594992Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677851999210877:2930] txid# 281474976710661 HANDLE EvClientConnected 2026-01-07T17:47:51.598627Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677851999210877:2930] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-07T17:47:51.598694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677851999210877:2930] txid# 281474976710661 SEND to# [59:7592677851999210876:2326] Source {TEvProposeTransactionStatus txid# 
281474976710661 Status# 48} 2026-01-07T17:47:51.666991Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830524373133:2116] Handle TEvProposeTransaction 2026-01-07T17:47:51.667029Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830524373133:2116] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:51.667096Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830524373133:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677851999210901:2948] 2026-01-07T17:47:51.670101Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677851999210901:2948] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:59426" 2026-01-07T17:47:51.670180Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677851999210901:2948] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:51.670204Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677851999210901:2948] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:51.670263Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677851999210901:2948] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:51.670765Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677851999210901:2948] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:51.670962Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677851999210901:2948] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:51.671075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677851999210901:2948] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:51.671254Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677851999210901:2948] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:51.672069Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:51.677685Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677851999210901:2948] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:51.677754Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677851999210901:2948] txid# 281474976710662 SEND to# [59:7592677851999210900:2341] Source {TEvProposeTransactionStatus txid# 
281474976710662 Status# 48} 2026-01-07T17:47:51.734443Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677830524373057:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:51.734507Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:51.749278Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677830524373133:2116] Handle TEvProposeTransaction 2026-01-07T17:47:51.749322Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677830524373133:2116] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:51.749375Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677830524373133:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677851999210988:3017] 2026-01-07T17:47:51.752543Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677851999210988:3017] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MSwiaWF0IjoxNzY3ODA4MDcxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.T5vCwyQXBub-AP9Ox-jpoXnBg-cJNI2vRSeF4WWQCcCnpND3DO1HLSlWmj042eaQQsfcg1nw2idbxlAfsMVfSLcSmXTphPlMcRJO3GGFOXlaoY91MY3Ia46Ajeid8mVdqPWagTw-6KTX4uU9plcMVCE98t2ZDZDYER9oGkeobIpD2GIZkN_bqrg4cTn0EJgF5nNuU-2-pA9xR6xQ3VsScdVorRQidaTu1WM4pkCY7k8RUHRrDDTG_R5C6bum1X5XZg7A5rCOlnkGJnqU1qCSHfUXjvd3jWRpHH3C0OV_cRkIb_eyGVsSplJ6zfnfOYkSIRJU_EGLlW2QqNxCt0ymxA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MSwiaWF0IjoxNzY3ODA4MDcxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:59720" 2026-01-07T17:47:51.752629Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677851999210988:3017] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:51.752651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677851999210988:3017] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-07T17:47:51.752830Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677851999210988:3017] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:51.752874Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677851999210988:3017] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:51.753193Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677851999210988:3017] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:51.753608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677851999210988:3017] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:51.753641Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677851999210988:3017] txid# 281474976710663, Access denied for 
ordinaryuser, attempt to manage user 2026-01-07T17:47:51.753754Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677851999210988:3017] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-07T17:47:51.753785Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677851999210988:3017] txid# 281474976710663 SEND to# [59:7592677851999210987:2346] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:51.755521Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=ZDI5ZGRjNTMtMTg0MWY1MWYtNTUxNTQ5ODctZDdjODY0MGE=, ActorId: [59:7592677851999210971:2346], ActorState: ExecuteState, LegacyTraceId: 01kecs4y1f4zvk14s6bn4c4b2m, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:51.755769Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677830524373133:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:51.755791Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677830524373133:2116] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2026-01-07T17:47:45.850667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677825163718004:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.851060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:46.087631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.111137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:46.111647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:46.186677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:46.249381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.253068Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677825163717964:2081] 1767808065848904 != 1767808065848907 2026-01-07T17:47:46.308125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:46.308153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:46.308161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:46.308233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:46.344401Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.501825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:46.573414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.881247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:49.253163Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677845708371881:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.253331Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:49.283401Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.355715Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:49.361204Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677845708371857:2081] 1767808069252130 != 1767808069252133 2026-01-07T17:47:49.372708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:49.372792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:49.399953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:49.509496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:49.509516Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:49.509523Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:49.509607Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:49.553448Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.707301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:49.713007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:49.723397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:49.730403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.214413Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7592677850003340696:2471], Recipient [2:7592677845708372744:2300]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2026-01-07T17:47:50.214452Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-07T17:47:50.214695Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:47:50.214941Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2026-01-07T17:47:50.215001Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2026-01-07T17:47:50.215036Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2026-01-07T17:47:50.215066Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2026-01-07T17:47:50.215095Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2026-01-07T17:47:50.215173Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 
2026-01-07T17:47:50.225923Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7592677850003340700:2472], Recipient [2:7592677845708372744:2300]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2026-01-07T17:47:50.225958Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-07T17:47:50.226117Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:47:50.226298Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2026-01-07T17:47:50.226332Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2026-01-07T17:47:50.226396Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 2026-01-07T17:47:50.290549Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFlatTest::SplitThenMerge [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> TFlatTest::ShardUnfreezeNonFrozen ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2026-01-07T17:47:46.502255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677830042546143:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.502442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:46.764278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.768992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:46.769119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:46.822604Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:46.860334Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.861570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677830042546005:2081] 1767808066453523 != 1767808066453526 2026-01-07T17:47:47.004571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.072572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.140052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.285631Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.008s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:47.289422Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.005s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:47.330871Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:47.343481Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:47.472840Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:47.477763Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.004s,wait=0.000s,interrupts=0} 2026-01-07T17:47:47.478068Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:47.478091Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:47.478738Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2026-01-07T17:47:47.485501Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.32, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-07T17:47:47.518279Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:47.567726Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:47:47.568538Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:47:47.570787Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:47:47.571396Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:47:47.571430Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-07T17:47:49.972549Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677845919054348:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.972589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:50.009204Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.080844Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:50.118613Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.118683Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.125358Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:50.216334Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.299849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:50.304590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:50.312180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:50.321808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.401357Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:50.408523Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:50.431059Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:50.435576Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:50.457164Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 2026-01-07T17:47:50.575389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } } } TxId: 281474976710678 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:50.575582Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710678:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-07T17:47:50.575736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:50.575756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:50.575926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-07T17:47:50.575953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710678:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-07T17:47:50.576110Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710678:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000g+\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000g+\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 4 }, request: TablePath: 
"/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-07T17:47:50.576136Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710678:1, propose status:StatusAccepted, reason: , at s ... 710678, partId: 0, tablet: 72075186224037890 2026-01-07T17:47:50.589257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037891 2026-01-07T17:47:50.603255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710678, at schemeshard: 72057594046644480 2026-01-07T17:47:50.603280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2026-01-07T17:47:50.603298Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710678, at schemeshard: 72057594046644480 2026-01-07T17:47:50.607311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037891 2026-01-07T17:47:50.607368Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037891 2026-01-07T17:47:50.607591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037890 2026-01-07T17:47:50.607623Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037890 2026-01-07T17:47:50.607644Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710678:0 3 -> 131 2026-01-07T17:47:50.607817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.607893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.607955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.607977Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:47:50.608004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 281474976710678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976710678:0 at tablet 72057594046644480 
2026-01-07T17:47:50.608204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2026-01-07T17:47:50.608279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037888 2026-01-07T17:47:50.613314Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:50.613317Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:50.613562Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:50.613586Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:50.613742Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-07T17:47:50.618782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-07T17:47:50.618836Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-07T17:47:50.619100Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710678:0 131 -> 132 2026-01-07T17:47:50.619194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-07T17:47:50.619525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.619672Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:50.619690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710678, path id: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:47:50.619893Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:47:50.619905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7592677850214022129:2243], at schemeshard: 72057594046644480, txId: 281474976710678, path id: 39 2026-01-07T17:47:50.619935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.619955Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:47:50.619983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976710678 at tablet 72057594046644480 2026-01-07T17:47:50.620945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2026-01-07T17:47:50.622413Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2026-01-07T17:47:50.622497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2026-01-07T17:47:50.622507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710678 2026-01-07T17:47:50.622526Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710678, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], version: 4 2026-01-07T17:47:50.622540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-01-07T17:47:50.622593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2026-01-07T17:47:50.622787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-07T17:47:50.622812Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-07T17:47:50.622856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710678:0 progress is 1/1 2026-01-07T17:47:50.622865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-07T17:47:50.622878Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710678:0 progress is 1/1 2026-01-07T17:47:50.622888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-07T17:47:50.622900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 1/1, is published: true 2026-01-07T17:47:50.622933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:7592677850214022771:2347] message: TxId: 281474976710678 2026-01-07T17:47:50.622955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-07T17:47:50.622977Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710678:0 2026-01-07T17:47:50.622986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710678:0 2026-01-07T17:47:50.623091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-07T17:47:50.623276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710678 2026-01-07T17:47:50.623392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-07T17:47:50.623405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710678:0 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 24769, msgbus: 16590 2026-01-07T17:44:50.163810Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677073292055097:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.166007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.212102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:50.487908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.522175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.522317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.529693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.623060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.676652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.694637Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.694656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.694662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.694756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.910395Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677073292055309:2121] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.910489Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677073292056137:2754] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.910941Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677073292056137:2754] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.946532Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677073292056137:2754] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.952451Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677073292056137:2754] Handle TEvDescribeSchemeResult Forward to# [1:7592677073292056136:2753] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.954903Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073292055309:2121] Handle TEvProposeTransaction 2026-01-07T17:44:50.954925Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073292055309:2121] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:50.954981Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073292055309:2121] Cookie# 0 userReqId# "" 
txid# 281474976710657 SEND to# [1:7592677073292056145:2762] 2026-01-07T17:44:51.080378Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677073292056145:2762] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.080465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677073292056145:2762] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.080484Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677073292056145:2762] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.080530Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677073292056145:2762] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.080830Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677073292056145:2762] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.080975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677073292056145:2762] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.081072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677073292056145:2762] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:51.081211Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677073292056145:2762] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:51.081794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.083512Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677073292056145:2762] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:51.083584Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677073292056145:2762] txid# 281474976710657 SEND to# [1:7592677073292056144:2761] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:44:51.093599Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073292055309:2121] Handle TEvProposeTransaction 2026-01-07T17:44:51.093627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073292055309:2121] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:51.093655Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073292055309:2121] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677077587023476:2794] 2026-01-07T17:44:51.095629Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677077587023476:2794] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.095684Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077587023476:2794] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.095700Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077587023476:2794] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.095752Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077587023476:2794] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.096074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077587023476:2794] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.096166Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077587023476:2794] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.096219Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077587023476:2794] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:51.096355Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077587023476:2794] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:51.096808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.099078Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077587023476:2794] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:51.099125Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077587023476:2794] txid# 281474976710658 SEND to# [1:7592677077587023475:2793] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-07T17:44:51.133170Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073292055309:2121] Handle TEvProposeTransaction 202 ... 
-01-07T17:47:52.618517Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677858724582546:2916] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-07T17:47:52.618577Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677858724582546:2916] txid# 281474976710661 SEND to# [59:7592677858724582545:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:52.682754Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677837249744713:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:52.682834Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:52.771623Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677837249744857:2114] Handle TEvProposeTransaction 2026-01-07T17:47:52.771669Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677837249744857:2114] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:52.771721Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677837249744857:2114] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677858724582577:2937] 2026-01-07T17:47:52.774811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677858724582577:2937] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53124" 2026-01-07T17:47:52.774894Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677858724582577:2937] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:52.774918Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677858724582577:2937] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:52.774978Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677858724582577:2937] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:52.775471Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677858724582577:2937] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:52.775646Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677858724582577:2937] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:52.775734Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677858724582577:2937] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:52.775938Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7592677858724582577:2937] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:52.776746Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:52.780043Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677858724582577:2937] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:52.780097Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677858724582577:2937] txid# 281474976710662 SEND to# [59:7592677858724582576:2342] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:52.833450Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677837249744857:2114] Handle TEvProposeTransaction 2026-01-07T17:47:52.833491Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677837249744857:2114] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:52.833548Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677837249744857:2114] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677858724582658:3004] 2026-01-07T17:47:52.836822Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677858724582658:3004] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53142" 2026-01-07T17:47:52.836903Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677858724582658:3004] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:52.836942Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677858724582658:3004] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:52.837003Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677858724582658:3004] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:52.837442Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677858724582658:3004] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:52.837552Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677858724582658:3004] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:52.871366Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677858724582658:3004] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-07T17:47:52.871492Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677858724582658:3004] txid# 
281474976710663 HANDLE EvClientConnected 2026-01-07T17:47:52.874292Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677858724582658:3004] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-07T17:47:52.874343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677858724582658:3004] txid# 281474976710663 SEND to# [59:7592677858724582657:2344] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-07T17:47:52.957598Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677837249744857:2114] Handle TEvProposeTransaction 2026-01-07T17:47:52.957636Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677837249744857:2114] TxId# 281474976710664 ProcessProposeTransaction 2026-01-07T17:47:52.957678Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677837249744857:2114] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7592677858724582692:3023] 2026-01-07T17:47:52.960598Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677858724582692:3023] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MiwiaWF0IjoxNzY3ODA4MDcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.ikd2HGi7CketDOcx346yVMLRa1dlQdcszMdzwpz9EAxl_ex8x1yMlYojWXoX5CwSu4T00L_FVCoKxvo3A1nXZzIiinlcRLcGt_HOUMGPrZnZFD_6f9b08J-8v5JjQyNZucTHnLKgDdxi7xgYD_l96oxu1PUSOJHIO3d4awlgBkvkDYzvnG0SFndf-J1qxCd9yFDTZ6Z058rfA1gDAPa9DhP4QQ1PydR1mY82Oupg0vz47VrcW-tTMCge3DiKTpZX2a1IoIkHgbpexuJCubr6TOUVGKADZohuji9NJRiPtD_vsdSblbIughOFUTakzG2M5UUY8inLFzeCHTyDBXTifQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3MiwiaWF0IjoxNzY3ODA4MDcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53154" 2026-01-07T17:47:52.960684Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677858724582692:3023] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:52.960710Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677858724582692:3023] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-07T17:47:52.961165Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677858724582692:3023] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:52.961224Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677858724582692:3023] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:52.961270Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677858724582692:3023] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:52.961743Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677858724582692:3023] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:52.961785Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677858724582692:3023] txid# 281474976710664, 
Access denied for ordinaryuser, attempt to manage user 2026-01-07T17:47:52.961889Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677858724582692:3023] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-07T17:47:52.961925Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677858724582692:3023] txid# 281474976710664 SEND to# [59:7592677858724582691:2355] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:52.962317Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=OThlZmIwMjktOTJkYjQ1Y2EtNmI0NjBlNGQtZmUzOGNlMGM=, ActorId: [59:7592677858724582679:2355], ActorState: ExecuteState, LegacyTraceId: 01kecs4z77bknv30r933sdygrj, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:52.962562Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677837249744857:2114] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:52.962592Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677837249744857:2114] TxId# 281474976710665 ProcessProposeKqpTransaction |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TLocksFatTest::PointSetNotBreak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2026-01-07T17:47:47.883502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677835500763858:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:47.883749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:47.921713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:48.166335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:48.166416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:48.168803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:48.169700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.220898Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:48.227379Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677835500763818:2081] 1767808067881728 != 1767808067881731 2026-01-07T17:47:48.381009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:47:48.416288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:48.430137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:48.436183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:48.458856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-07T17:47:48.501319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.894092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; insert finished 10165 usec 12396 usec 9346 usec 8780 usec 7852 usec 9187 usec 9770 usec 10211 usec 10306 usec 10750 usec 2026-01-07T17:47:51.152428Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677851167230903:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:51.152834Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:51.173601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:51.262888Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:51.264493Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677851167230688:2081] 1767808071140976 != 1767808071140979 2026-01-07T17:47:51.273408Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:51.273783Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:51.273842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:51.287577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:51.502428Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: dc-1/.metadata/script_executions 2026-01-07T17:47:51.509313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:51.537129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.149030Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2026-01-07T17:47:47.902990Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677837137461156:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:47.903473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:48.141052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.200268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:48.200361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:48.212113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:48.214523Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677837137461027:2081] 1767808067896432 != 1767808067896435 2026-01-07T17:47:48.220092Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:48.309417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.430051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:48.436946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:48.515972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , opId: 281474976710659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2026-01-07T17:47:48.520512Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677841432429131:2500] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T17:47:51.175024Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677853262066728:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:51.175093Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:51.196210Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:51.275028Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:51.277084Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677853262066703:2081] 1767808071174357 != 1767808071174360 2026-01-07T17:47:51.316828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:51.316916Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:51.323884Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:51.395773Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:51.491910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:51.497959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:51.514664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:51.658198Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:51.663283Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:51.688141Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:51.693444Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:51.717533Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.719310Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.719466Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.720757Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.720917Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.721774Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:51.722597Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.722727Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.723168Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:51.723834Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.724189Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.724618Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:51.725328Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.725439Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.725840Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:51.726491Z node 2 
:TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.726572Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.726943Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:51.727553Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.727641Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.728052Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:51.728701Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.729096Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.729509Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:51.730125Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.730213Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.730585Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:51.731190Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.731268Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.731625Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-07T17:47:51.732239Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.732859Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:51.733273Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-07T17:47:51.733897Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-07T17:47:51.733982Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:51.734381Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 28147497671567 ... 
hemaChanged at tablet: 72057594046644480 2026-01-07T17:47:52.146420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7592677857557035465 RawX2: 4503608217307465 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2026-01-07T17:47:52.146437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, shard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146455Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146518Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 129 -> 240 2026-01-07T17:47:52.146676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146812Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2026-01-07T17:47:52.146815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146839Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:47:52.146842Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:47:52.146913Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2026-01-07T17:47:52.146931Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-07T17:47:52.147137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:47:52.147271Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part 
operation is done id#281474976715693:0 progress is 1/1 2026-01-07T17:47:52.147306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-07T17:47:52.147331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-07T17:47:52.147339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-07T17:47:52.147353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2026-01-07T17:47:52.147394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7592677857557035691:2415] message: TxId: 281474976715693 2026-01-07T17:47:52.147410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-07T17:47:52.147424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-07T17:47:52.147432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715693:0 2026-01-07T17:47:52.147523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:52.148236Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:52.148291Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:52.149385Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:52.149423Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:52.150212Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:52.150464Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:52.150830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677857557035465 RawX2: 4503608217307465 } TabletId: 72075186224037894 State: 4 2026-01-07T17:47:52.150871Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:52.151142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677853262067550 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 
2026-01-07T17:47:52.151171Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:52.151319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:52.151354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:52.151454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:52.151473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:52.151585Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2026-01-07T17:47:52.151601Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:47:52.152874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-07T17:47:52.153206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:47:52.153328Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037894 reason = ReasonStop 2026-01-07T17:47:52.153365Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:47:52.153479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:47:52.153581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:52.153730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:52.153762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:52.153815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:52.154499Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:47:52.154529Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-07T17:47:52.154572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046644480:7 2026-01-07T17:47:52.154585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-07T17:47:52.154648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:47:52.154668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:47:52.154697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:47:52.155477Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:52.155685Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:47:52.156798Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037894 2026-01-07T17:47:52.156902Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037894 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TFlatTest::SelectRangeReverse >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> TLocksTest::Range_Pinhole >> TFlatTest::WriteMergeAndRead >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 4312, msgbus: 4720 2026-01-07T17:44:50.211692Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677074822271051:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.211730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.248030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:44:50.492362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.519826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.519943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.531052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-07T17:44:50.580182Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.676485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.676505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.676512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.676600Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.703925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.883547Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677074822271204:2117] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.883615Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677074822272040:2757] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.884023Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677074822272040:2757] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.909739Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677074822272040:2757] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.914276Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677074822272040:2757] Handle TEvDescribeSchemeResult Forward to# [1:7592677074822272039:2756] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } 
PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.916054Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074822271204:2117] Handle TEvProposeTransaction 2026-01-07T17:44:50.916075Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074822271204:2117] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:50.916143Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074822271204:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677074822272045:2762] 2026-01-07T17:44:51.060147Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677074822272045:2762] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.060239Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677074822272045:2762] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.060260Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677074822272045:2762] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.060310Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677074822272045:2762] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.060598Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677074822272045:2762] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.060696Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677074822272045:2762] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.060797Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677074822272045:2762] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:51.060944Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677074822272045:2762] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:51.061452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.062918Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677074822272045:2762] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:51.062997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677074822272045:2762] txid# 281474976710657 SEND to# [1:7592677074822272044:2761] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:44:51.071351Z node 1 :TX_PROXY DEBUG: 
proxy_impl.cpp:314: actor# [1:7592677074822271204:2117] Handle TEvProposeTransaction 2026-01-07T17:44:51.071373Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074822271204:2117] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:51.071439Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074822271204:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677079117239379:2797] 2026-01-07T17:44:51.073805Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677079117239379:2797] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.073860Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677079117239379:2797] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.073873Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677079117239379:2797] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.073923Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677079117239379:2797] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.074179Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677079117239379:2797] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.074288Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677079117239379:2797] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.074340Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677079117239379:2797] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:51.074449Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677079117239379:2797] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:51.074877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.076839Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677079117239379:2797] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:51.076874Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677079117239379:2797] txid# 281474976710658 SEND to# [1:7592677079117239378:2796] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-07T17:44:51.104996Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074822271204:2117] Handle TEvProposeTransaction 2026- ... 
ProposeTransaction 2026-01-07T17:47:54.781043Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677845206876064:2104] TxId# 281474976715661 ProcessProposeTransaction 2026-01-07T17:47:54.781110Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677845206876064:2104] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7592677866681713937:2981] 2026-01-07T17:47:54.784343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677866681713937:2981] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2026-01-07T17:47:54.784416Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677866681713937:2981] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.784441Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677866681713937:2981] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2026-01-07T17:47:54.784610Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677866681713937:2981] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:54.784648Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677866681713937:2981] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:54.785656Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7592677866681713937:2981] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:47:54.785760Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677866681713937:2981] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.785995Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677866681713937:2981] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.786102Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677866681713937:2981] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:54.786225Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# 
[59:7592677866681713937:2981] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2026-01-07T17:47:54.786386Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677866681713937:2981] txid# 281474976715661 HANDLE EvClientConnected 2026-01-07T17:47:54.790384Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677866681713937:2981] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-07T17:47:54.790539Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677866681713937:2981] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:47:54.790573Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677866681713937:2981] txid# 281474976715661 SEND to# [59:7592677866681713854:2336] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2026-01-07T17:47:54.811060Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677845206876064:2104] Handle TEvProposeTransaction 2026-01-07T17:47:54.811092Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677845206876064:2104] TxId# 281474976715662 ProcessProposeTransaction 2026-01-07T17:47:54.811134Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677845206876064:2104] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7592677866681713964:2993] 2026-01-07T17:47:54.813925Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677866681713964:2993] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:56724" 2026-01-07T17:47:54.813997Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677866681713964:2993] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.814023Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677866681713964:2993] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:54.814077Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677866681713964:2993] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.814434Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677866681713964:2993] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.814524Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677866681713964:2993] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:54.857484Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677866681713964:2993] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2026-01-07T17:47:54.857660Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677866681713964:2993] txid# 281474976715662 HANDLE EvClientConnected 2026-01-07T17:47:54.861138Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677866681713964:2993] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2026-01-07T17:47:54.861195Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677866681713964:2993] txid# 281474976715662 SEND to# [59:7592677866681713960:2328] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2026-01-07T17:47:54.915751Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677845206876064:2104] Handle TEvProposeTransaction 2026-01-07T17:47:54.915784Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677845206876064:2104] TxId# 281474976715663 ProcessProposeTransaction 2026-01-07T17:47:54.915829Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677845206876064:2104] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7592677866681713997:3008] 2026-01-07T17:47:54.918775Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677866681713997:3008] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:56764" 2026-01-07T17:47:54.918846Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677866681713997:3008] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.918872Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677866681713997:3008] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-01-07T17:47:54.919020Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677866681713997:3008] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:54.919055Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7592677866681713997:3008] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:54.919099Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677866681713997:3008] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.919360Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677866681713997:3008] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.919393Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677866681713997:3008] txid# 281474976715663, Access denied for ordinaryuser@builtin, 
attempt to manage user 2026-01-07T17:47:54.919476Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677866681713997:3008] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-01-07T17:47:54.919505Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677866681713997:3008] txid# 281474976715663 SEND to# [59:7592677866681713996:2346] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:54.920179Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=OTIwMDhlNTMtY2ExYmE1ZmItZTlmNWMxZjktNGQzNmE3YTM=, ActorId: [59:7592677866681713982:2346], ActorState: ExecuteState, LegacyTraceId: 01kecs514h91ndwrk58n8sxnt1, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-07T17:47:54.920455Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677845206876064:2104] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:54.920475Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677845206876064:2104] TxId# 281474976715664 ProcessProposeKqpTransaction |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> TFlatTest::SelectRangeItemsLimit >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> PartitionStats::Collector >> PartitionStats::Collector [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TFlatTest::PathSorting |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2026-01-07T17:47:50.729245Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677848724991411:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:50.729545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:50.941041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.962063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.962150Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.977543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:51.078062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:51.082833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677848724991307:2081] 1767808070721350 != 1767808070721353 2026-01-07T17:47:51.106981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:51.241570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:51.311389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:51.487602Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:51.492037Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:51.522280Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:51.527965Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-07T17:47:51.682858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:47:51.683214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 
281474976710676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:51.683973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:51.684018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-07T17:47:51.684029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:47:51.684062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:47:51.684096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:51.684113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:51.684232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-07T17:47:51.684354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:47:51.685324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:47:51.685401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-07T17:47:51.686038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-07T17:47:51.686208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-07T17:47:51.686379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:47:51.686396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:47:51.686508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-07T17:47:51.686703Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:47:51.686734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677848724991844:2253], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-01-07T17:47:51.686748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592677848724991844:2253], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-01-07T17:47:51.686788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-07T17:47:51.686835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-07T17:47:51.687173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:51.687275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:47:51.687598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-07T17:47:51.687682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-07T17:47:51.687738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2026-01-07T17:47:51.687759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2026-01-07T17:47:51.687767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2026-01-07T17:47:51.689555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-07T17:47:51.689635Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-07T17:47:51.689650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txI ... 01-07T17:47:55.708759Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:55.708849Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2026-01-07T17:47:55.708896Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:55.708972Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:55.710235Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:55.710345Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7592677870087407111:2678], serverId# [2:7592677870087407115:3635], sessionId# [0:0:0] Check that tablet 72075186224037888 was deleted 2026-01-07T17:47:55.710434Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:47:55.710474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677865792438121 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-07T17:47:55.710485Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-07T17:47:55.710533Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 Check that tablet 72075186224037889 was deleted 2026-01-07T17:47:55.710829Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-07T17:47:55.710952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:55.710994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:55.711951Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:47:55.712223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677865792438391 RawX2: 4503608217307431 } TabletId: 
72075186224037891 State: 4 2026-01-07T17:47:55.712281Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:55.712496Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2026-01-07T17:47:55.712773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:55.712794Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:47:55.712803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:55.712882Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:47:55.713510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:47:55.713767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:47:55.713977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:47:55.714148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:47:55.714280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:47:55.714302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:47:55.714352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 Check that tablet 72075186224037890 was deleted 2026-01-07T17:47:55.714700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:47:55.714742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:47:55.714924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:47:55.714957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:47:55.715033Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046644480 2026-01-07T17:47:55.715207Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:47:55.715264Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7592677865792438229:2595], serverId# [2:7592677865792438230:2596], sessionId# [0:0:0] 2026-01-07T17:47:55.715570Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:47:55.715752Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:47:55.715845Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:47:55.717175Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:47:55.717231Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7592677870087406966:3507], serverId# [2:7592677870087406967:3508], sessionId# [0:0:0] 2026-01-07T17:47:55.717251Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7592677865792438483:2762], serverId# [2:7592677865792438485:2764], sessionId# [0:0:0] 2026-01-07T17:47:55.717492Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:47:55.717528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677865792438390 RawX2: 4503608217307430 } TabletId: 72075186224037890 State: 4 2026-01-07T17:47:55.717580Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:47:55.717915Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:47:55.717980Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:47:55.718059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:47:55.718089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:47:55.719220Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:47:55.719487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:47:55.719518Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:47:55.719768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:47:55.719970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046644480 2026-01-07T17:47:55.719999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:47:55.720075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:47:55.720384Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:47:55.720466Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:47:55.721246Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:47:55.721406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:47:55.721429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:47:55.721475Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Check that tablet 72075186224037891 was deleted 2026-01-07T17:47:56.018918Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2026-01-07T17:47:56.019999Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> TObjectStorageListingTest::TestSkipShards [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:27.996520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:28.085053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:28.092157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:28.092523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:28.092654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:28.452603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:28.540566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:28.540706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:28.575290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:28.657589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:29.312601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:29.313608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:29.313659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:29.313694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:29.314171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:29.384025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:29.921031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:32.978494Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:32.986555Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:32.990129Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:33.031070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:46:33.031213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:33.071396Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:33.073126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:33.252892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:33.253045Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:33.254456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.255220Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.255790Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.256614Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.257187Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.257375Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.257582Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.257783Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.257915Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.273636Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:33.464569Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:33.561090Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:33.561183Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:33.590716Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:33.592107Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:33.592400Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:33.592472Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:33.592529Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:33.592581Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:33.592654Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:33.592709Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:33.593582Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:33.606950Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.607049Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.636294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:46:33.637156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:46:33.697234Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:46:33.698725Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:46:33.713078Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:46:33.713149Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:46:33.713239Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:46:33.720562Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:33.723945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:33.735730Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:33.735871Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:33.747639Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:33.902313Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:46:33.965412Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:34.320917Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:46:34.466649Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:34.466753Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:46:35.227823Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... chemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:46:44.588596Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:46.097096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:46:46.097166Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:56.187078Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:46:56.187175Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:46:56.187232Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:46:56.187277Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:46:58.139430Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:46:58.139516Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:46:58.139810Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:46:58.155116Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:46:58.452333Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:46:58.457287Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:46:58.471569Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:46:58.471750Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], Start read next stream part 2026-01-07T17:46:58.483260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4447:4003], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:58.483402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4459:4008], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:58.483529Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:58.485336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4464:4013], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:58.485687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:46:58.491580Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4480:4017], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:58.494909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:46:58.575590Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4462:4011], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:46:58.685101Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4546:4060], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:58.731348Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4545:4059] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:46:59.038880Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4567:4073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:46:59.039197Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:46:59.039307Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4569:4075] 2026-01-07T17:46:59.039383Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4569:4075] 2026-01-07T17:46:59.039761Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4570:4076] 2026-01-07T17:46:59.039877Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4569:4075], server id = [2:4570:4076], tablet id = 72075186224037894, status = OK 2026-01-07T17:46:59.039989Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4570:4076], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:46:59.040063Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:46:59.040203Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:46:59.040290Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4567:4073], StatRequests.size() = 1 2026-01-07T17:46:59.040376Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:47:59.048592Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs3a17fv2cy3fpexqwydj2", SessionId: ydb://session/3?node_id=2&id=ODliZDJmZWYtMjI3NmJiMzctOTk1OTgxZi1hMTg5ZmJiZA==, Slow query, duration: 60.569478s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:47:59.050233Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:47:59.050518Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], Start read next stream part 2026-01-07T17:47:59.050900Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-07T17:47:59.051219Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4689:4188], 
ActorId: [2:4690:4189], Starting query actor #1 [2:4691:4190] 2026-01-07T17:47:59.051278Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4690:4189], ActorId: [2:4691:4190], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:47:59.054976Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4690:4189], ActorId: [2:4691:4190], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODAxNzk1YzUtMmVkMGZlYmUtYjc5OTVjZmQtZjA5ZjNiZjQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:47:59.057778Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:47:59.057884Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4425:2463], ActorId: [2:4435:3997], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTFlZGY0MjgtNDdmMDdkOTAtNDMwOWNmODMtZDAwMWU5NGE=, TxId: 2026-01-07T17:47:59.143662Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4708:4204]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:47:59.143957Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:47:59.144013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4708:4204], StatRequests.size() = 1 2026-01-07T17:47:59.311996Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4690:4189], ActorId: [2:4691:4190], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODAxNzk1YzUtMmVkMGZlYmUtYjc5OTVjZmQtZjA5ZjNiZjQ=, TxId: 2026-01-07T17:47:59.312103Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4690:4189], ActorId: [2:4691:4190], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODAxNzk1YzUtMmVkMGZlYmUtYjc5OTVjZmQtZjA5ZjNiZjQ=, TxId: 2026-01-07T17:47:59.312537Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4689:4188], ActorId: [2:4690:4189], Got response [2:4691:4190] SUCCESS 2026-01-07T17:47:59.313367Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:47:59.330534Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:47:59.330616Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3305:3446] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2026-01-07T17:47:54.144242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677866637758997:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:54.144725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:54.429036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.435544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:54.437207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:54.493640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:54.574584Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:54.600267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.756417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:54.828955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.339982Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677877096638530:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.340064Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.366741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.496857Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.496948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:47:57.526600Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.535551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.604657Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.783332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.790106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.806232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:57.811564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2026-01-07T17:47:54.127528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677866560255216:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:54.127610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:54.353041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.379413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:54.379493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:54.432394Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677866560255143:2081] 1767808074103959 != 1767808074103962 2026-01-07T17:47:54.439965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:54.447402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:54.537257Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.556441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:47:54.556467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:47:54.556478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:47:54.556589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:47:54.728046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:54.736406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:54.823158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.135860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::IncrementImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2026-01-07T17:47:55.117348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677868993486305:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:55.118867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:55.292886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.293014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.339215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:55.352141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.356537Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.360208Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677868993486201:2081] 1767808075106448 != 1767808075106451 2026-01-07T17:47:55.541470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:55.597728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.644462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:55.707149Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677868993487107:2564] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-07T17:47:58.150937Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677882699157289:2197];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:58.151440Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:58.185068Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.299742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:58.299815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:58.317074Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677882699157116:2081] 1767808078111692 != 1767808078111695 2026-01-07T17:47:58.330471Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:58.337387Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:58.425913Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.543126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.557520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:58.567756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.648127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:58.662936Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677882699158056:2593] txid# 281474976710660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2026-01-07T17:47:58.665088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:47:58.678281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks |93.2%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2026-01-07T17:47:35.446543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677783100465493:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:35.446601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.694604Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.785091Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677783100465242:2081] 1767808055357956 != 1767808055357959 2026-01-07T17:47:35.792683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.796388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.796476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.799781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.905026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:36.109419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:36.119307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:36.132987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:36.203878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.337652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.392534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:36.446976Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:38.901762Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677794852831244:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.901835Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:38.912856Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:39.027337Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.053461Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:39.056846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:39.057079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:39.067708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:39.307083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:39.321372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:39.334491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:39.335760Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:39.344212Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.411764Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.466486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.625315Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677812105855085:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:42.626375Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:42.680831Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.725118Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677812105854973:2081] 1767808062618208 != 1767808062618211 2026-01-07T17:47:42.753979Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:42.766570Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:42.766656Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:42.791083Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:42.969034Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.999725Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:43.005342Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:43.026226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:43.098542Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:43.155720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.137944Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677830408012747:2203];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.140420Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:46.166388Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.264330Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.283294Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:46.283378Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:46.311793Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#720575 ... ut propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:46.530741Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:46.542632Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.608723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.662153Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.062973Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592677849186721999:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:50.063027Z node 5 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:50.098635Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.162134Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:50.163947Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592677849186721974:2081] 1767808070062175 != 1767808070062178 2026-01-07T17:47:50.196706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.196768Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.202100Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:50.355179Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.380367Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:50.388144Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:50.400173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.452959Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.505838Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.866669Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592677860473949758:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:53.866746Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:53.906854Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.985982Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:53.987639Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592677860473949730:2081] 1767808073864120 != 1767808073864123 2026-01-07T17:47:54.024956Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:54.025051Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:54.030756Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:54.205667Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.291827Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:54.299417Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:54.308470Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:54.313578Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.398592Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.454954Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.260255Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592677880880956296:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:58.260306Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:58.308193Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.392151Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:58.397075Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592677880880956265:2081] 1767808078259851 != 1767808078259854 2026-01-07T17:47:58.417467Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:58.417574Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:58.449497Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:58.514822Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.724783Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.742123Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:58.747619Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.817291Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.876995Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> TFlatTest::PartBloomFilter [GOOD] >> 
DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2026-01-07T17:47:57.215109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677879086819477:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.215214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.544089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.544197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:57.545008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.546857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.649046Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677879086819450:2081] 1767808077213005 != 1767808077213008 2026-01-07T17:47:57.654500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.779034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.900109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.909820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.929426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:58.038420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.231561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:00.641415Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677892282538842:2067];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:48:00.641481Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:00.655851Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.730004Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:00.730330Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677892282538814:2081] 1767808080640115 != 1767808080640118 2026-01-07T17:48:00.772748Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:00.772860Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:00.776910Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:00.860461Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.968561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:00.973129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:48:00.990426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2026-01-07T17:47:57.753122Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677877193700459:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.762137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.800826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:58.113014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-07T17:47:58.138353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:58.138470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:58.161080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:58.245832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:58.370701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.442297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.448610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:58.529544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.719931Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-07T17:47:58.722839Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-07T17:47:58.752769Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-07T17:47:58.758660Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-07T17:47:58.773579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:58.783386Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.785281Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:47:58.785392Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:58.788388Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.788645Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r 
(max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-07T17:47:58.789910Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:58.789945Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:58.790052Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:47:58.790100Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:58.792968Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.794728Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:47:58.794804Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:47:58.796546Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2026-01-07T17:47:58.797393Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:47:58.797410Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-01-07T17:47:58.800110Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.801830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:47:58.801941Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:47:58.812314Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.814206Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.814356Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.815638Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.815781Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.816360Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.817305Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.817415Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.817874Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 
72075186224037889 restored its data 2026-01-07T17:47:58.818574Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.818669Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.819088Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.819744Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.820074Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.820611Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-07T17:47:58.821372Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.821480Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.821972Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.822723Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.822810Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.823435Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-07T17:47:58.824104Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.824210Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.824725Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.825573Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.825662Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.826106Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-07T17:47:58.826867Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.826951Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.827388Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.828170Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.828254Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:47:58.828727Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-07T17:47:58.829485Z node 1 :TX_DATASHARD DEBUG: 
datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-07T17:47:58.829588Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:58.830102Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-07T17:47:58.830870Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:5 ... 224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:48:01.994860Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:48:01.994963Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:48:01.996070Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:48:01.996119Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-07T17:48:02.006154Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:48:02.006297Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:48:02.006749Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:48:02.006824Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:48:02.008038Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:48:02.008100Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:48:02.008373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677895741255787 RawX2: 4503608217307447 } TabletId: 72075186224037891 State: 4 2026-01-07T17:48:02.008428Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:48:02.008600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677895741255782 RawX2: 4503608217307446 } TabletId: 72075186224037892 State: 4 2026-01-07T17:48:02.008623Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:48:02.008697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677895741255435 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 
2026-01-07T17:48:02.008718Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:48:02.008785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592677895741255779 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2026-01-07T17:48:02.008804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:48:02.008908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:48:02.008948Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:48:02.008978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:48:02.009044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:48:02.009061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:48:02.009065Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-07T17:48:02.009092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:48:02.009096Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:48:02.009100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:48:02.009140Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:48:02.009140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:48:02.009148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:48:02.010750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:48:02.010965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-07T17:48:02.010997Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:48:02.011134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-07T17:48:02.011164Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-07T17:48:02.011219Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:48:02.011249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-07T17:48:02.011251Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:48:02.011350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-07T17:48:02.011451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-07T17:48:02.011548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:48:02.011655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-07T17:48:02.011753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:48:02.011772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-07T17:48:02.011813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:48:02.011837Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-07T17:48:02.011923Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-07T17:48:02.012055Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-07T17:48:02.012084Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-07T17:48:02.013340Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-07T17:48:02.013390Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-07T17:48:02.013498Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:48:02.013562Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:48:02.013795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 
2026-01-07T17:48:02.013820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:48:02.013846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-07T17:48:02.013852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-07T17:48:02.013869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-07T17:48:02.013875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-07T17:48:02.013890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-07T17:48:02.013902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:48:02.013926Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:48:02.015048Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:48:02.015097Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:48:02.016544Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:48:02.016592Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2026-01-07T17:47:58.015261Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677882783464190:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:58.015343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:58.256768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.286069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:58.286163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:58.287474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:58.381076Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677878488496652:2081] 1767808077988444 != 1767808077988447 2026-01-07T17:47:58.386782Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:58.522267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.640782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.708879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:59.006450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:01.333595Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677897722409592:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:01.333661Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:01.351569Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.459313Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:01.477264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:01.477324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:01.480206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:01.621144Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.686428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:01.703993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [TM] {BAZEL_UPLOAD} 
ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2026-01-07T17:47:59.017696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677888286259762:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:59.017900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:59.250874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.308297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:59.308431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:59.319413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:59.341694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.344053Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677888286259739:2081] 1767808079016849 != 1767808079016852 2026-01-07T17:47:59.420140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.529878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:59.609336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T17:48:02.270230Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677897879786834:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:02.270295Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:02.284706Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:02.351336Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:02.366834Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677897879786807:2081] 1767808082268925 != 
1767808082268928 2026-01-07T17:48:02.407768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:02.407843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:02.412450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:02.522357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:02.583804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:02.634130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.955291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TInterconnectTest::TestManyEvents >> TInterconnectTest::TestBlobEvent >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> TestProtocols::TestConnectProtocol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2026-01-07T17:47:59.734546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:59.773216Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:59.781876Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2152] 2026-01-07T17:47:59.788485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:59.812718Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:59.918391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:59.918452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.927443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:59.927672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:59.930652Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:47:59.930754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:47:59.930809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:47:59.932105Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:59.932395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:59.932453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:195:2152] in generation 2 2026-01-07T17:48:00.045799Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:00.084134Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:00.086486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:00.086644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-07T17:48:00.086708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:00.086752Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:00.086792Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:00.087128Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.087890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.089180Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:00.089302Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:00.089435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.089478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.089551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-07T17:48:00.089606Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:00.089657Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:00.090580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:00.090638Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:00.090680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 
9437184 2026-01-07T17:48:00.090714Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:00.090762Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:00.100699Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:128:2152]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:00.100782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:00.100873Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:00.101078Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:00.101168Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:00.101398Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:00.101522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:00.101560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:00.101593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:00.101625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:00.101920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:00.101956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:00.101987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:00.102017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:00.102080Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:00.102116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:00.102151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:48:00.102179Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:00.102200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:00.114529Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 
2026-01-07T17:48:00.114603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:00.114638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:00.114698Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:00.114778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:00.118966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.119035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.119077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-07T17:48:00.119225Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:128:2152]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:00.119272Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:00.119430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:00.119466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:00.119500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:00.119535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:00.127651Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:00.127745Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:00.128021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.128069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.128133Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:00.128174Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:00.128208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:00.128246Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:00.128283Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [100000 ... [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:06.159437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:48:06.159454Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:06.159569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:237:2229], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-07T17:48:06.159604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.159629Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-07T17:48:06.159724Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:06.159749Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-07T17:48:06.159783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-07T17:48:06.159834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:06.159857Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:06.159933Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:06.160102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:237:2229], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-07T17:48:06.160127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.160149Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-07T17:48:06.160243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:06.160257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.160275Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-07T17:48:06.160327Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:237:2229], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 
Seqno# 99} 2026-01-07T17:48:06.160342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.160355Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:48:06.160385Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:237:2229], Recipient [1:459:2401]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:48:06.160402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T17:48:06.160426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-07T17:48:06.160485Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-07T17:48:06.160535Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-07T17:48:06.160583Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:06.160651Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:237:2229], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:48:06.160667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.160682Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:48:06.160725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:459:2401], Recipient [1:459:2401]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:06.160743Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:06.160769Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-07T17:48:06.160793Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:48:06.160816Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-07T17:48:06.160834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-07T17:48:06.160857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:48:06.160898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-07T17:48:06.160956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-07T17:48:06.160981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: 
Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-07T17:48:06.160997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:48:06.161010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-07T17:48:06.161023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-07T17:48:06.161039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-07T17:48:06.161465Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-07T17:48:06.161514Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:48:06.161555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-07T17:48:06.161578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-07T17:48:06.161603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-07T17:48:06.161627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:48:06.161775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-07T17:48:06.161804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-07T17:48:06.161824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-07T17:48:06.161840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-07T17:48:06.161859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-07T17:48:06.161873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-07T17:48:06.161888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-07T17:48:06.161907Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:06.161932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-07T17:48:06.161960Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-07T17:48:06.161977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-07T17:48:06.175615Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:06.175679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:48:06.175737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-07T17:48:06.175803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:06.175856Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:06.176129Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:237:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:06.176169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:06.176208Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageRed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2026-01-07T17:47:46.980002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677832025621473:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.980103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:47.251094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.339432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:47.339546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:47.341866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.353153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:47.393125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677832025621447:2081] 1767808066977418 != 1767808066977421 2026-01-07T17:47:47.442299Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.586604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.665220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.992902Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:51.980531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677832025621473:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:51.980586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:56.407214Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-01-07T17:47:56.423612Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [1767808075957:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-01-07T17:47:56.429100Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7592677870680333778:6128] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2026-01-07T17:47:56.429211Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7592677870680333778:6128] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable 2026-01-07T17:47:57.082583Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677877647675204:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.082665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.099515Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:57.182810Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677877647675159:2081] 1767808077080650 != 1767808077080653 2026-01-07T17:47:57.214410Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.217758Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.217842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:57.218987Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.222504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.456908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.470122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.479806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:57.485423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.517653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.115345Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:02.082639Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592677877647675204:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:02.082720Z 
node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:05.957880Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976711361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-07T17:48:05.965768Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976711361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-07T17:48:05.968066Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976711361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-07T17:48:05.968218Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7592677912007420160:6107] txid# 281474976711361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> KqpUserConstraint::KqpReadNull-UploadNull |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] |93.2%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] |93.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TPartitionTests::CommitOffsetRanges >> TPQTabletTests::Parallel_Transactions_1 >> TPQTestInternal::TestPartitionedBigTest >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> PQCountersLabeled::Partition >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TLocksFatTest::PointSetRemove [GOOD] >> TInterconnectTest::OldNbs [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPQTabletTests::Parallel_Transactions_2 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] |93.3%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> TPartitionTests::CommitOffsetRanges [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TSentinelTests::PDiskFaultyState >> TPartitionTests::ConflictingActsInSeveralBatches >> TPQTabletTests::Partition_Send_Predicate_With_False >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 10141, msgbus: 9781 2026-01-07T17:44:50.270642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677073303045365:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.270701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.282484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2026-01-07T17:44:50.514354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.631382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.631505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.666451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.691611Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.698549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.783643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.783669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.783685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.783789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.975644Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677073303045482:2116] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.975729Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677073303046303:2728] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.976219Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677073303046303:2728] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.004472Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677073303046303:2728] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:51.009708Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677073303046303:2728] Handle TEvDescribeSchemeResult Forward to# [1:7592677073303046302:2727] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:51.011223Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073303045482:2116] Handle TEvProposeTransaction 2026-01-07T17:44:51.011245Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073303045482:2116] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:44:51.011311Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073303045482:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592677077598013604:2733] 2026-01-07T17:44:51.159851Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677077598013604:2733] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.159927Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077598013604:2733] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.159943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077598013604:2733] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.159991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077598013604:2733] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.160271Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077598013604:2733] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.160353Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077598013604:2733] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.160434Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077598013604:2733] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:44:51.160602Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077598013604:2733] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:44:51.161056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.162658Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077598013604:2733] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:44:51.162727Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077598013604:2733] txid# 281474976710657 SEND to# [1:7592677077598013603:2732] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:44:51.172303Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677073303045482:2116] Handle TEvProposeTransaction 2026-01-07T17:44:51.172325Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677073303045482:2116] TxId# 281474976710658 ProcessProposeTransaction 2026-01-07T17:44:51.172378Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677073303045482:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7592677077598013640:2766] 2026-01-07T17:44:51.174784Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677077598013640:2766] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.174831Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677077598013640:2766] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.174843Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677077598013640:2766] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.174895Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677077598013640:2766] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.175133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677077598013640:2766] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.175217Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677077598013640:2766] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.175267Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677077598013640:2766] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:44:51.175390Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677077598013640:2766] txid# 281474976710658 HANDLE EvClientConnected 2026-01-07T17:44:51.175802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.177907Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677077598013640:2766] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-07T17:44:51.177954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677077598013640:2766] txid# 281474976710658 SEND to# [1:7592677077598013639:2765] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-07T17:44:51.274961Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NAc ... 1-07T17:47:54.583219Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677864793228944:2934] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-07T17:47:54.583289Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677864793228944:2934] txid# 281474976710661 SEND to# [59:7592677864793228943:2326] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-07T17:47:54.710587Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677843318391207:2116] Handle TEvProposeTransaction 2026-01-07T17:47:54.710626Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677843318391207:2116] TxId# 281474976710662 ProcessProposeTransaction 2026-01-07T17:47:54.710684Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677843318391207:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7592677864793228968:2952] 2026-01-07T17:47:54.713519Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677864793228968:2952] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46174" 2026-01-07T17:47:54.713576Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677864793228968:2952] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.713593Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677864793228968:2952] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:54.713651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677864793228968:2952] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.714099Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677864793228968:2952] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.714274Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677864793228968:2952] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:54.714361Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# 
[59:7592677864793228968:2952] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-07T17:47:54.714514Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677864793228968:2952] txid# 281474976710662 HANDLE EvClientConnected 2026-01-07T17:47:54.715227Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:47:54.720796Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677864793228968:2952] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-07T17:47:54.720860Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677864793228968:2952] txid# 281474976710662 SEND to# [59:7592677864793228967:2341] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-07T17:47:54.789650Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677843318391207:2116] Handle TEvProposeTransaction 2026-01-07T17:47:54.789680Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677843318391207:2116] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:47:54.789731Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677843318391207:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7592677864793229057:3027] 2026-01-07T17:47:54.792791Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677864793229057:3027] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46188" 2026-01-07T17:47:54.792862Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677864793229057:3027] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.792883Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677864793229057:3027] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:47:54.794621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677864793229057:3027] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.794993Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677864793229057:3027] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.795084Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7592677864793229057:3027] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:47:54.800580Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7592677843318391196:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:54.800662Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:47:54.843103Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7592677864793229057:3027] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-07T17:47:54.843378Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7592677864793229057:3027] txid# 281474976710663 HANDLE EvClientConnected 2026-01-07T17:47:54.847811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7592677864793229057:3027] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-07T17:47:54.847876Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677864793229057:3027] txid# 281474976710663 SEND to# [59:7592677864793229056:2343] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-07T17:47:54.915833Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7592677843318391207:2116] Handle TEvProposeTransaction 2026-01-07T17:47:54.915871Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7592677843318391207:2116] TxId# 281474976710664 ProcessProposeTransaction 2026-01-07T17:47:54.915936Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7592677843318391207:2116] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7592677864793229093:3044] 2026-01-07T17:47:54.918405Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7592677864793229093:3044] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3NCwiaWF0IjoxNzY3ODA4MDc0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.mrxSufl4Mdc6c15LEeXbzoQL9VvufQVuXv13SPZ683Ku8T-nNoVyUjaaMQpACYhcGcVI-Xym-bu2dcCSHTqvzlw7xbvRhJ0w0v0m4yHN5LnZlyD-i3Bw7CQWU_lpAo2EzaTwsfjC7bMEuGPi6DoUTz7yaudPbjMKn0Kw5GTk-ZzkJLUYRnZDyeX1s0W2I3ZJvGBBwRWhBKfMjySq4wzEzqscvkRlfxDTnDPu9ZnPRbHVTNLuWgkn3US6uiNNy0N9nt6jmmwLkSo4EweYoNh1cbf_BqPmZFANdMotJxcFGuuI7r2ae1UrlVVgvi3ldrj6NX99gWg4ahZrIoO1TAu8HA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTI3NCwiaWF0IjoxNzY3ODA4MDc0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:47498" 2026-01-07T17:47:54.918471Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7592677864793229093:3044] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-07T17:47:54.918488Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7592677864793229093:3044] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-07T17:47:54.918702Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7592677864793229093:3044] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-07T17:47:54.918792Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# 
[59:7592677864793229093:3044] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-07T17:47:54.918847Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7592677864793229093:3044] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:47:54.919187Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7592677864793229093:3044] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:47:54.919222Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7592677864793229093:3044] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2026-01-07T17:47:54.919306Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7592677864793229093:3044] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-07T17:47:54.919329Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7592677864793229093:3044] txid# 281474976710664 SEND to# [59:7592677864793229092:2355] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:47:54.919760Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=Mzk0ODgxYzAtMjIwODZlYjktYzljNDY1MjAtNmE0YzM1NTA=, ActorId: [59:7592677864793229083:2355], ActorState: ExecuteState, LegacyTraceId: 01kecs514jepy8701e4gfk2y1x, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-01-07T17:47:54.920075Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7592677843318391207:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:47:54.920102Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7592677843318391207:2116] TxId# 281474976710665 ProcessProposeKqpTransaction |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TLocksTest::MultipleLocks [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2026-01-07T17:47:56.106210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677876012279385:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:56.106621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:56.325437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:56.325513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.355742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.378594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:56.411869Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:56.413472Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677876012279261:2081] 1767808076096706 != 1767808076096709 2026-01-07T17:47:56.577209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.601342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.704255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.835306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.877495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.113751Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:01.104251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592677876012279385:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:01.104301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:02.989351Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677900896550715:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:02.989407Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.001008Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.066038Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.068379Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677900896550688:2081] 1767808082988439 != 1767808082988442 2026-01-07T17:48:03.102891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.102978Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.108072Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:03.205674Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.277670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:03.291391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.342644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.382530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.996878Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:06.312222Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677917215764995:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:06.312277Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:06.323977Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:06.378638Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:06.380061Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677917215764970:2081] 1767808086311893 != 1767808086311896 2026-01-07T17:48:06.423758Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:06.423839Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:06.425864Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:06.509749Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:06.628484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:48:06.644908Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.695679Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.739903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.317872Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> PQCountersSimple::Partition >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout [GOOD] >> TPQTest::TestCompaction >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionLevelMetrics >> DataShardTxOrder::RandomPoints_DelayRS_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2026-01-07T17:47:59.884915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:59.884996Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.885504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:59.902575Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:59.902981Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:47:59.903249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:59.915622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:59.964195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:59.964825Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:59.966321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:47:59.966387Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:47:59.966442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:47:59.966818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:59.967007Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:59.967065Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:48:00.046241Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:00.075956Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:00.086515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:00.086697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:48:00.086752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:00.086790Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:00.086822Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:00.086998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.087872Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.089232Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:00.089352Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:00.089494Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:00.089538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:00.090551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:00.090601Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:00.090655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:00.090689Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:00.090751Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:00.090854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.090925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.090989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:48:00.100277Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:00.100356Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:00.100472Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:00.100776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:00.100854Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:00.101392Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:00.101469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:00.101508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:00.101549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:00.101581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:00.101922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:00.101971Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:00.102009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:00.102042Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:00.102106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:00.102142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:00.102172Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:48:00.102207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:00.102232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:00.117813Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:00.117898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 
2026-01-07T17:48:00.117937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:00.117998Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:00.118092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:00.118944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.119005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:00.119065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:48:00.119204Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:00.119239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:00.119385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:00.119424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:00.119459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:00.119492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:00.127674Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:00.127767Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:00.128023Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.128065Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:00.128133Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:00.128177Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:00.128214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:00.128258Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:00.128291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:48:11.271606Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.271633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.271664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.271701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-07T17:48:11.271734Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.271851Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:48:11.271873Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:48:11.271890Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:48:11.271914Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.271938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.271972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.272026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-07T17:48:11.272055Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.272182Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-07T17:48:11.272206Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.272226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.272259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.272311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-07T17:48:11.272333Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.272441Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.272463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.272494Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 511] from 
9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.272527Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-07T17:48:11.272547Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.272658Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.272681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.272709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.272760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-07T17:48:11.272799Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.272906Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.272959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:516] at 9437184 on unit FinishPropose 2026-01-07T17:48:11.273001Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-07T17:48:11.273078Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.273227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.273252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.273310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:11.273354Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-07T17:48:11.273378Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.273482Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.273505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2026-01-07T17:48:11.273552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:11.273601Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.273790Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-07T17:48:11.273831Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.273868Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2026-01-07T17:48:11.274125Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-07T17:48:11.274155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.274197Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2026-01-07T17:48:11.274332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-07T17:48:11.274362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.274389Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2026-01-07T17:48:11.274479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-07T17:48:11.274504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.274539Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2026-01-07T17:48:11.274686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-07T17:48:11.274714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.274735Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2026-01-07T17:48:11.274823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-07T17:48:11.274847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.274888Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2026-01-07T17:48:11.275014Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-07T17:48:11.275040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.275061Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2026-01-07T17:48:11.275163Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-07T17:48:11.275189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:11.275210Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2026-01-07T17:48:11.088870Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.141252Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:11.141320Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:11.141366Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:11.141418Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:11.154548Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:11.154853Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:11.155198Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:183:2196] 2026-01-07T17:48:11.155833Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:11.155881Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 
2026-01-07T17:48:11.155920Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:183:2196] 2026-01-07T17:48:11.155962Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:11.156031Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:11.156447Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-07T17:48:11.156480Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:11.156531Z node 1 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ChangeConfig]) 2026-01-07T17:48:11.156577Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:11.156602Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.156645Z node 1 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ChangeConfig]) 2026-01-07T17:48:11.156693Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:11.156717Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:11.156748Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:11.156911Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:11.157004Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:11.157127Z node 1 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2026-01-07T17:48:11.157215Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|c06c5858-942c844a-9792c2e8-a963c7bf_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-07T17:48:11.157263Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:11.157395Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 2026-01-07T17:48:11.157524Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:11.157609Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:11.157680Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:11.157721Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:11.157758Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:48:11.157800Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:11.157826Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.157861Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:48:11.157896Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:11.157920Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:11.157946Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:11.157985Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2026-01-07T17:48:11.158025Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:48:11.158185Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:48:11.158239Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 24. Cookie: 1 2026-01-07T17:48:11.158297Z node 1 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2026-01-07T17:48:11.158326Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:11.158375Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:48:11.158413Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:11.158440Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.158465Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:48:11.158554Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2026-01-07T17:48:11.159305Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 120 count 1 nextOffset 101 batches 1 2026-01-07T17:48:11.159360Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:11.159386Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:11.159419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:11.159787Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000? size 106 WTime 128 2026-01-07T17:48:11.160036Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:11.180691Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.211599Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:11.211693Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:11.211758Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 24 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:11.211822Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. 
Partition: 1 2026-01-07T17:48:11.211887Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2026-01-07T17:48:11.212056Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:11.212087Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:11.212115Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:11.212140Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.212168Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:11.212197Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Parti ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 
Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 
556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 
Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2026-01-07T17:47:45.859066Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677825707555249:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.859295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.887951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:46.113023Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.120271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:46.120369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:46.122941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:46.210676Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677825707555221:2081] 1767808065853144 != 1767808065853147 2026-01-07T17:47:46.215712Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.329052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.398348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:46.498755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.646029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.701157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.865139Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:49.343838Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:49.344034Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677845593695735:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.344912Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:49.455376Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:49.457520Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677845593695609:2081] 1767808069313507 != 1767808069313510 2026-01-07T17:47:49.457754Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.467654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:49.467886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:49.497770Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:49.675494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:49.702139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.759740Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.768674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.820036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.908460Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:52.909122Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:53.011181Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:53.026898Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:53.026979Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:53.030135Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:53.128890Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.193243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:53.198462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:53.214598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.292331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.362103Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.374952Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677875829955115:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:56.374999Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:56.402203Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.478536Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:56.514509Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:56.514603Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.521690Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:47:56.671179Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.677202Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:56.682945Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.687961Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:56.692558Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.768633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.814741Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.010402Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592677890024778605:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:00.010490Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:00.036127Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.109521Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:00.147395Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:00.147467Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:00.151500Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:00.222454Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.376489Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:00.394770Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.448220Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.494217Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.796234Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592677904378749040:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:03.796292Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.815647Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.890085Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.891195Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592677904378749013:2081] 1767808083795323 != 1767808083795326 2026-01-07T17:48:03.932706Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.932806Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.934583Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:04.113004Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.152132Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:04.168509Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.231852Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.284887Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.380309Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592677921276507636:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:07.380491Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:07.393825Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.472358Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:07.516479Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:07.516567Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:07.520524Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:07.684143Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.758997Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:07.780206Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:48:07.840281Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.884174Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DataShardTxOrder::DelayData >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> TLocksTest::Range_BrokenLock1 [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] |93.3%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TLocksTest::CK_Range_BrokenLockInf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] Test command err: 2026-01-07T17:48:09.960111Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.095629Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:10.104149Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:10.104392Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:10.104462Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:10.104498Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:10.105265Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:10.105344Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.108284Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:10.143251Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:211:2214], now have 1 active actors on pipe 2026-01-07T17:48:10.143406Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:10.161196Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:48:10.164018Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:48:10.164173Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.165150Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:48:10.165342Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:10.165816Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:10.166313Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:219:2142] 2026-01-07T17:48:10.167312Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:10.167367Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:10.167413Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:219:2142] 2026-01-07T17:48:10.167474Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:10.167543Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:10.168086Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:10.168524Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:10.168570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:10.168609Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:10.168656Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:10.168701Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:48:10.168745Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:10.168809Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:10.168900Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:10.168959Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:10.168994Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:10.169025Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-07T17:48:10.169049Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-07T17:48:10.169079Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:48:10.169117Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:48:10.169158Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:10.169671Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:10.169733Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:10.169798Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:10.171073Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:10.172665Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:10.176006Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:10.176498Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:10.177036Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:10.177108Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:10.177138Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:10.177197Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:10.177228Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:10.177289Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:10.177702Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-07T17:48:10.181684Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:229:2224], now have 1 active actors on pipe 2026-01-07T17:48:10.182531Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 182 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-07T17:48:10.183555Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-07T17:48:10.184423Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-07T17:48:10.184471Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-07T17:48:10.184536Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-07T17:48:10.184593Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-07T17:48:10.184648Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... 
TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-07T17:48:12.552945Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-07T17:48:12.553030Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-07T17:48:12.553077Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-07T17:48:12.553137Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-07T17:48:12.553181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.553232Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:12.553265Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.553315Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:12.553391Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-07T17:48:12.553424Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-07T17:48:12.553460Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:12.553493Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:12.553533Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.553785Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:12.555690Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:12.555771Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:12.555805Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.555830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.555852Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.555880Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.555906Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.555935Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:12.555976Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-07T17:48:12.556003Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:48:12.556027Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:48:12.556054Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:48:12.556078Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:48:12.556105Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-07T17:48:12.556132Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-07T17:48:12.556158Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-07T17:48:12.556199Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-07T17:48:12.556235Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:12.556264Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:12.556287Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:12.556310Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 
72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:12.556330Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:12.556349Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-07T17:48:12.556375Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-07T17:48:12.556402Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-07T17:48:12.556422Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:48:12.556443Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-07T17:48:12.566741Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.597687Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.608165Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.608249Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.608287Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.608320Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.608345Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.618668Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.639519Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.639583Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.639612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.639645Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.639669Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.650014Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.670786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.670846Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T17:48:12.670874Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.670906Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.670934Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.681251Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.702027Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.712543Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.712615Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.712655Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.712686Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.712712Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.733244Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.733294Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.733315Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.733340Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.733359Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.753908Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.764766Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:388:2332], now have 1 active actors on pipe |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteSplit >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TLocksTest::BrokenDupLock [GOOD] >> TLocksTest::GoodSameShardLock [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure |93.3%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TLocksTest::GoodNullLock [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2026-01-07T17:47:33.635968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677776438798115:2246];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.636074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.775316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:34.082411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.082494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.096728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.171720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.242109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.249612Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677776438797901:2081] 1767808053623171 != 1767808053623174 2026-01-07T17:47:34.463181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.487605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.497422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.517166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.585218Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.590098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.727397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.811241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.492457Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677791823181153:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.492515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.529525Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.628355Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.628449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.630188Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.663080Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.737014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.950397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.959060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.968505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:37.976602Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.067442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.137544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.084914Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677808770717058:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.085429Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.118314Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.186123Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.186197Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.202478Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.203586Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677808770716944:2081] 1767808061073867 != 1767808061073870 2026-01-07T17:47:41.217682Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.296125Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.462254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.469608Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:41.491418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.560837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.628174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.873235Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677822162040279:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.873302Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:44.887851Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.970957Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:44.988007Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:44.988101Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.015058Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#720575 ... 
o.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:56.898057Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.904808Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:56.929356Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.136810Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.143172Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.160576Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:57.166669Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.241841Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.308177Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.946425Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677890266286034:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:00.946498Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:00.977048Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.011714Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677890266286006:2081] 1767808080944927 != 1767808080944930 2026-01-07T17:48:01.062939Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:48:01.099077Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:01.099181Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:01.112426Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:01.166939Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.381585Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:01.389411Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:01.404648Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.476951Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.534184Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.932727Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677906483980771:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:04.932790Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:04.946167Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.024095Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:05.065803Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:05.065934Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:05.072964Z node 9 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:05.213545Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.315190Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:05.328830Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.379923Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.428252Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.063215Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677929063792767:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:09.063279Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:09.080296Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.168059Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:09.181705Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:09.181793Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:09.205003Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:09.301087Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.441682Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:09.459084Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.513239Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.566560Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2026-01-07T17:47:33.613463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677774392206893:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.613579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.038743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.065995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.066080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.127647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.214286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.225012Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677774392206851:2081] 1767808053601151 != 1767808053601154 2026-01-07T17:47:34.243560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.415506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.421357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.520282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.634143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.727392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.807682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.488802Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677792934273669:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.488863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.499318Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:37.577018Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.662738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.662821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.690913Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.703145Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.841782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.926709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.932851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.946363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.039726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.150914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.176791Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677810874908250:2228];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.178407Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.196448Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.256588Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.259577Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677810874908053:2081] 1767808061146282 != 1767808061146285 2026-01-07T17:47:41.316232Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.316315Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.319825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.454321Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.501671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:47:41.525234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.586129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.639337Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.607237Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677820637935514:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.607287Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:44.616790Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:44.705160Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:44.705250Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:44.724870Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:44.728414Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677820637935487:2081] 1767808064606471 != 1767808064606474 2026-01-07T17:47:44.746246Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:44.753017Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.941625Z node 4 :FLAT_TX_SCHEMESHARD ... 
2057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:56.361255Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.407871Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:56.589922Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.658487Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.681039Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.741216Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.793796Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.419283Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677892087313131:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:00.419358Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:00.433266Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.511116Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:00.541280Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:00.541369Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:00.543609Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:00.713874Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.762336Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:00.780225Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.836903Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.891346Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.608470Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677906996178299:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:04.608538Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:04.623487Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.730671Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:04.730776Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:04.732759Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:04.734232Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677906996178270:2081] 1767808084607331 != 1767808084607334 2026-01-07T17:48:04.767441Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:04.799290Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.002775Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:05.007547Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:05.018222Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:48:05.026601Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.089656Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.136978Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.135204Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677928952457350:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:09.135274Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:09.155044Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.247310Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:09.261630Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677928952457325:2081] 1767808089134704 != 1767808089134707 2026-01-07T17:48:09.277020Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:09.277141Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:09.308257Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:09.350495Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.508808Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:09.530172Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.586992Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.640651Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:48:10.903664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:11.037818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:11.060419Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:11.060832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:11.060881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:11.428672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:11.428766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:11.520365Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808088652890 != 1767808088652894 2026-01-07T17:48:11.543935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:11.594918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:11.712988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:11.995967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.009841Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:12.132884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.563193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:12.563369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1081:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:12.563457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:12.566343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:12.566582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:12.574301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:12.691875Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:48:12.780960Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort >> TLocksTest::CK_Range_GoodLock [GOOD] >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2026-01-07T17:47:33.629520Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677777196967159:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:33.629671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:33.661024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:33.941034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.005135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.005247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.097235Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677777196966937:2081] 1767808053602959 != 1767808053602962 2026-01-07T17:47:34.111735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:34.127278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:34.127967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.492341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:34.499549Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:34.512799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:34.625289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:34.653247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.825454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:34.883038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.566657Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677791302309530:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.566973Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.609001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.781294Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.784897Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677791302309383:2081] 1767808057531485 != 1767808057531488 2026-01-07T17:47:37.801258Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.802120Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.802186Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.842154Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.210370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.249149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.302242Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.336352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.446966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.566597Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:41.275823Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677808362109597:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.276078Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.304671Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.374074Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.376070Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.376131Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.381138Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677808362109471:2081] 1767808061266925 != 1767808061266928 2026-01-07T17:47:41.408832Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.582824Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.602126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.621401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:41.633282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:41.640223Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.700180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.744391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.964068Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677824219333428:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.964172Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.067792Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.080854Z node 4 :HIVE WAR ... 
ispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592677874871168392:2081] 1767808076971430 != 1767808076971433 2026-01-07T17:47:57.111053Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.111152Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:57.118300Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.264794Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.352895Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.361368Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.374368Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.437271Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.507660Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.354383Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677895685159962:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:01.354433Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:01.398597Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.508278Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:01.512693Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:01.513430Z node 8 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:01.539312Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:01.602136Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.791546Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:01.797305Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:48:01.812323Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.868522Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.975200Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.574668Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677911400980348:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:05.574736Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:05.597034Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.674699Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:05.710524Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:05.710613Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:05.717715Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:05.872538Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.957032Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:05.973005Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.026803Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.076859Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.833032Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677929192811881:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:09.833143Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:09.850502Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.932396Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:09.933770Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677929192811854:2081] 1767808089831799 != 1767808089831802 2026-01-07T17:48:09.951934Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:09.952039Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:09.976654Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:10.143884Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:10.265903Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:10.286491Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.387823Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.438848Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.3%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort [GOOD] >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2026-01-07T17:47:34.618944Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677778451567078:2228];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.619379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.145078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:35.145201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:35.149200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.181383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.190001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.205089Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677778451566887:2081] 1767808054562711 != 1767808054562714 2026-01-07T17:47:35.414472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.423396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.434641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.490913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.496281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.620402Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.652058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.715740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.512902Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.513080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:38.591352Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.592556Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677796829569837:2081] 1767808058453636 != 1767808058453639 2026-01-07T17:47:38.600239Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:38.600314Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:38.626543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.706716Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.809613Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.817366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:38.829564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.895990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.954641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.849233Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677809273413839:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:41.849410Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:41.868451Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:41.962195Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.963494Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677809273413802:2081] 1767808061847948 != 1767808061847951 2026-01-07T17:47:41.973213Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.975883Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.975953Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:42.001288Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:42.151035Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.222050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:42.244807Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.300168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.343751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.604174Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677827917926955:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.604251Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.625984Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.686971Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677827917926927:2081] 1767808065602977 != 1767808065602980 2026-01-07T17:47:45.690624Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:45.727287Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.727367Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.731095Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.844387Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinali ... 
cript_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.767134Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.773316Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:57.793069Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:57.804190Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.888006Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.996075Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.557242Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677896596223648:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:01.557702Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:01.584153Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:01.688889Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:01.690287Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.690403Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677896596223548:2081] 1767808081536785 != 1767808081536788 2026-01-07T17:48:01.737214Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:01.737319Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:01.743954Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:01.842855Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.969013Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:01.992100Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.048989Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.104315Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:05.576785Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677912291498173:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:05.576948Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:05.597585Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.674250Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:05.675667Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677912291498133:2081] 1767808085575224 != 1767808085575227 2026-01-07T17:48:05.692675Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:05.692777Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:05.721727Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:05.764493Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:05.940621Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:05.959732Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.015865Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.123242Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.996782Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677930750603360:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:09.996871Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:10.013576Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:10.114797Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677930750603331:2081] 1767808089995833 != 1767808089995836 2026-01-07T17:48:10.122403Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:10.129065Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:10.129172Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:10.134704Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:10.264523Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:10.409107Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:10.434620Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.503546Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.613676Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:48:02.232819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:02.432461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:02.452488Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:02.453079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:02.453164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:02.824791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:02.824907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:02.905376Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808079434579 != 1767808079434583 2026-01-07T17:48:02.931506Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:02.978113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:03.082121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:03.387560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:03.401391Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:03.517846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.558845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:03.559716Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:03.559958Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:48:03.560185Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:03.593080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:03.593703Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:03.593835Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:03.595381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:48:03.595445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:48:03.595496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:48:03.595801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:03.595911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:03.595996Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:48:03.606732Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:03.634208Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:48:03.634430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:03.634539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:48:03.634581Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:48:03.634614Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:48:03.634646Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:03.634888Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:03.634952Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:03.635221Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:48:03.635338Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:48:03.635481Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:48:03.635517Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:03.635575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:48:03.635610Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:48:03.635650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:48:03.635685Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:48:03.635741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:48:03.635836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:03.635886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:03.635935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:48:03.636324Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:48:03.636367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:03.636460Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:48:03.636714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:48:03.636782Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:48:03.636861Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:48:03.636918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:48:03.636969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:48:03.637000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:48:03.637036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:48:03.637278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:03.637307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:48:03.637344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:48:03.637369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:48:03.637418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:48:03.637443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:48:03.637470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:48:03.637494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:48:03.637518Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:03.639011Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:48:03.639064Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:48:03.649996Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... reamingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-07T17:48:14.079655Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-07T17:48:14.079705Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:48:14.079920Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1413:3136] 2026-01-07T17:48:14.079974Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1413:3136], channels: 1 2026-01-07T17:48:14.080072Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:48:14.080123Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1413:3136], trace_id# 2026-01-07T17:48:14.080175Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1409:3136] TxId: 281474976715671. 
Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1413:3136], trace_id# 2026-01-07T17:48:14.080214Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:48:14.080816Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1413:3136] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:48:14.080865Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1413:3136], trace_id# 2026-01-07T17:48:14.080910Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1413:3136], trace_id# 2026-01-07T17:48:14.081220Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1414:3136], Recipient [2:1350:3102]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-07T17:48:14.081315Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:48:14.081359Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v5001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v5001/18446744073709551615 2026-01-07T17:48:14.081396Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v6000/18446744073709551615 2026-01-07T17:48:14.081452Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-07T17:48:14.081522Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:48:14.081556Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:48:14.081586Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:14.081615Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:14.081658Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-07T17:48:14.081692Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:48:14.081709Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:14.081724Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:48:14.081739Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:48:14.081827Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-07T17:48:14.082046Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1414:3136], 0} after executionsCount# 1 2026-01-07T17:48:14.082089Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1414:3136], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:14.082149Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1414:3136], 0} finished in read 2026-01-07T17:48:14.082190Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:48:14.082208Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:48:14.082223Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:14.082239Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:14.082270Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T17:48:14.082284Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:14.082301Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-07T17:48:14.082332Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:48:14.083111Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1414:3136], Recipient [2:1350:3102]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:48:14.083153Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:48:14.083335Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1413:3136] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 741 Tasks { TaskId: 1 CpuTimeUs: 172 FinishTimeMs: 1767808094082 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 76 BuildCpuTimeUs: 96 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767808094082 CreateTimeMs: 1767808094080 UpdateTimeMs: 1767808094082 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:48:14.083385Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1413:3136] 2026-01-07T17:48:14.083480Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1409:3136] TxId: 281474976715671. Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:48:14.083527Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1409:3136] TxId: 281474976715671. 
Ctx: { TraceId: 01kecs5ksr7zvebcdq2bzve2jb, Database: , SessionId: ydb://session/3?node_id=2&id=MTE5NzY5ZTgtOTc1ZjMxM2QtZTA0ZjQ4ZDctOTljMzE1M2M=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000741s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 7 } items { uint32_value: 4 } } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack |93.3%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx [GOOD] >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations >> TPartitionTests::SetOffset >> TLocksTest::BrokenNullLock [GOOD] >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskFaultyGuard >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TLocksTest::Range_GoodLock1 [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] >> TPQTest::DirectReadOldPipe >> TPQTabletTests::ProposeTx_Unknown_Partition_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2026-01-07T17:48:10.894746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:10.894801Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:10.895228Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:10.906662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:10.906984Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:48:10.907211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: 
TxInitSchema.Execute 2026-01-07T17:48:10.916735Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:10.957233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:10.957794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:10.959280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:48:10.959348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:48:10.959404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:48:10.959727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:10.959885Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:10.959962Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:48:11.026793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:11.045071Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:11.045231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:11.045295Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:48:11.045331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:11.045357Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:11.045383Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.045497Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:11.045549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:11.045749Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:11.045831Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:11.045916Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:11.045954Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:11.045986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:11.046008Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:11.046041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:11.046068Z node 1 :TX_DATASHARD 
INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:11.046092Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:11.046152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:11.046179Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:11.046217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:48:11.050870Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:11.050925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:11.051003Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:11.051116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:11.051172Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:11.051210Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:11.051245Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:11.051269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:11.051293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:11.051313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:11.051503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:11.051531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:11.051559Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:11.051579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:11.051632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:11.051652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:11.051671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 
9437184 to execution unit WaitForPlan 2026-01-07T17:48:11.051691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:11.051706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:11.063802Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:11.063872Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:11.063904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:11.063956Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:11.064015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:11.064469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:11.064522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:11.064570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:48:11.064683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:11.064709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:11.064862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:11.064894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:11.064946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:11.064978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:11.067921Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:11.067982Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:11.068182Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:11.068219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:11.068273Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:11.068305Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:11.068338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:11.068373Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:11.068405Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:48:15.782415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.782448Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:48:15.782540Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.782588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.782632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.782687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:48:15.782716Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.782828Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:15.782859Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.782882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.782916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.782958Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:48:15.782982Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.783089Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:15.783136Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:15.783188Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:15.783213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.783237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit 
CompleteOperation 2026-01-07T17:48:15.783479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.783526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:48:15.783555Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.783660Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:15.783687Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.783712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.783750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.783791Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:48:15.783829Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.783942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.783974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.784009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.784051Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:48:15.784076Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.784163Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.784188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.784221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.784256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:15.784298Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.784394Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:15.784450Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:48:15.784484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-07T17:48:15.784544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:15.784569Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:15.784839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-07T17:48:15.784878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.784912Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-07T17:48:15.784999Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:48:15.785028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785053Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-07T17:48:15.785136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-07T17:48:15.785165Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785235Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-07T17:48:15.785334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-07T17:48:15.785362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785387Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-07T17:48:15.785464Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-07T17:48:15.785497Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785521Z node 1 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-07T17:48:15.785612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-07T17:48:15.785645Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785669Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-07T17:48:15.785741Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:15.785773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785810Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-07T17:48:15.785885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:15.785923Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:15.785948Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2026-01-07T17:47:34.564047Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677778498594282:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:34.564104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:34.884306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:34.916802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:34.916891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:34.985371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:35.079679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:35.097129Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677778498594177:2081] 1767808054539886 != 1767808054539889 2026-01-07T17:47:35.104997Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:35.353027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:35.365578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:35.382360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:35.449625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.577031Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:35.631700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:35.691528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.529965Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677795273014652:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:38.530011Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:38.657192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.749039Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677795273014628:2081] 1767808058519546 != 1767808058519549 2026-01-07T17:47:38.760485Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.765283Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:38.765362Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:47:38.773827Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.833001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.978336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.997673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.088286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:39.139644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.091226Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677814277199691:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:42.091297Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:42.131324Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.204156Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:42.205654Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677814277199664:2081] 1767808062090255 != 1767808062090258 2026-01-07T17:47:42.243609Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:42.243688Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:42.248828Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:42.370205Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:42.430118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:42.441309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:42.462662Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.524446Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:42.571615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.649640Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677828479854629:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.649707Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.679277Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.766310Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:45.802904Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.803001Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.818987Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.942979Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.024046Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__oper ... 
ecting 2026-01-07T17:47:57.737059Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592677878692258099:2081] 1767808077572141 != 1767808077572144 2026-01-07T17:47:57.772304Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.846954Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.014175Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.021797Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:58.041768Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.112141Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.176655Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.119231Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677899244963707:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:02.119309Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:02.134306Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:02.213644Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:02.213747Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:02.214286Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:02.216889Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[8:7592677899244963679:2081] 1767808082118089 != 1767808082118092 2026-01-07T17:48:02.230108Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:02.305966Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:02.482295Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:02.501797Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.565343Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:02.621151Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.170295Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677919157587930:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:06.170346Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:06.186924Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:06.257699Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:06.260009Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677919157587903:2081] 1767808086169679 != 1767808086169682 2026-01-07T17:48:06.300230Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:06.300338Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:06.305817Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:06.420530Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:06.498900Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:06.520333Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.577053Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:06.632594Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.540587Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677934987229075:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:10.540641Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:10.563421Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:10.617100Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:10.620514Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677934987229047:2081] 1767808090539440 != 1767808090539443 2026-01-07T17:48:10.663083Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:10.663164Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:10.665482Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:10.738720Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:10.914299Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:10.938185Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:10.988756Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.033815Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] Test command err: 2026-01-07T17:48:11.761112Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.817539Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:11.820896Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:11.821221Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:11.821268Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:11.821331Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:11.821375Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:11.821412Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:11.821469Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:11.836976Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-07T17:48:11.837087Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:11.851882Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:11.854577Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:11.854711Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:11.855501Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:11.855634Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:11.855994Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:11.856335Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:11.857048Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:11.857083Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:11.857117Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:11.857167Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:11.857246Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:11.857690Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:11.857740Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:11.857776Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:11.857831Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:11.857864Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.857894Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:11.857952Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:11.857981Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:11.858009Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:11.858041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:11.858085Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:11.858254Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:11.858306Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:11.858469Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:11.858618Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:11.860742Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:11.860835Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:11.860873Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:11.860901Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:11.860951Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.860988Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:11.861013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:11.861054Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:11.861386Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe 2026-01-07T17:48:11.861991Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2205], now have 1 active actors on pipe 2026-01-07T17:48:11.862083Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-07T17:48:11.862127Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-07T17:48:11.862176Z node 1 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100000} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-07T17:48:11.862368Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:11.864219Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:11.864830Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:11.865138Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:11.865389Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100000} [1:206:2142] 2026-01-07T17:48:11.866140Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:48:11.869472Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, 
KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:48:11.869809Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:48:11.869942Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100000 to M0000100001 2026-01-07T17:48:11.870159Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:48:11.870222Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From D0000100000 to D0000100001 2026-01-07T17:48:11.870380Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataStep 2026-01-07T ... 72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.550265Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.550300Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.550333Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-07T17:48:16.550373Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:173: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Blobs compaction is stopped 2026-01-07T17:48:16.550813Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-07T17:48:16.550879Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-07T17:48:16.550936Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-07T17:48:16.550982Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-07T17:48:16.551033Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-07T17:48:16.551077Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-07T17:48:16.551117Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:48:16.551158Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-07T17:48:16.551201Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-07T17:48:16.551243Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-07T17:48:16.551293Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-07T17:48:16.551346Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-07T17:48:16.551446Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send 
TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:16.554059Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-07T17:48:16.554117Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-07T17:48:16.554167Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2547: [PQ: 72057594037927937] GetOwnership request for the next Kafka transaction while previous is being deleted. Saving it till the complete delete of the previous tx.%01 2026-01-07T17:48:16.554259Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:16.554310Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-07T17:48:16.554358Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-07T17:48:16.554408Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-07T17:48:16.554455Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 2026-01-07T17:48:16.554533Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-07T17:48:16.554588Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-07T17:48:16.554644Z node 6 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100001} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-07T17:48:16.554759Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:16.556953Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:16.557541Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:16.557924Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:16.558243Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100001} [6:244:2142] 2026-01-07T17:48:16.559195Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDiskStatusStep 2026-01-07T17:48:16.560372Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMetaStep 2026-01-07T17:48:16.560651Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:48:16.560788Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100001 to M0000100002 2026-01-07T17:48:16.561074Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:48:16.561162Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From D0000100001 to D0000100002 2026-01-07T17:48:16.561379Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataStep 2026-01-07T17:48:16.561459Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:48:16.561517Z node 6 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:48:16.561565Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:48:16.561614Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:48:16.561661Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:16.561708Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Initializing completed. 2026-01-07T17:48:16.561764Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] init complete for topic 'topic' partition {0, KafkaTransactionWriteId{1, 0}, 100001} generation 2 [6:244:2142] 2026-01-07T17:48:16.561821Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:16.561876Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:16.561922Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2026-01-07T17:48:16.561963Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:16.562004Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.562042Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.562083Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.562120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:48:16.562203Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:16.562374Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:48:16.562596Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|7ceac25c-e441a9ff-c9e97bb5-f37564ff_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-07T17:48:16.562652Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:16.562698Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:48:16.562753Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:16.562791Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.562836Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:48:16.562898Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:16.562945Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-07T17:48:16.562993Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:48:16.563061Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-07T17:48:16.563158Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 5 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::SetOffset [GOOD] >> TSentinelTests::PDiskErrorState >> TPQTabletTests::ProposeTx_Unknown_WriteId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2026-01-07T17:47:37.151088Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677793351493709:2230];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.151185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.489445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.489541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.491902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.508390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.681083Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677793351493505:2081] 1767808057128064 != 1767808057128067 2026-01-07T17:47:37.688910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.789248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.902146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.910683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.922777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:37.986998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.153215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:38.175950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.231923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:40.671255Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677805223707192:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:40.671866Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:40.683501Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:40.785032Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677805223707168:2081] 1767808060670663 != 1767808060670666 2026-01-07T17:47:40.798933Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:40.802124Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:40.802731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:40.802795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:40.824831Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.024506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.038870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.081865Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.109754Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.172103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.235013Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677820684640743:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.235317Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:44.252788Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.327981Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:44.344056Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:44.344140Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:44.365669Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:44.455884Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.560555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:44.581633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.635370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.692075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.126154Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677837824514377:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:48.126338Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:48.141081Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.215875Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:48.256655Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:48.256748Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:48.263489Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:48.422794Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.454774Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemesha ... 
26-01-07T17:47:59.694938Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.696871Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592677887095687126:2081] 1767808079589280 != 1767808079589283 2026-01-07T17:47:59.713494Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:59.713573Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:59.735560Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:59.842803Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.971572Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:59.988720Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.072384Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.186094Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.837344Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677905767919260:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:03.837406Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.854541Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.936954Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.938806Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677905767919231:2081] 1767808083836347 != 1767808083836350 2026-01-07T17:48:03.974382Z 
node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.974482Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.977480Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:04.018396Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.206930Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:04.231776Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.287130Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.380019Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.417574Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677920809520730:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:07.417647Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:07.443156Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.496720Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:07.498577Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677920809520703:2081] 1767808087417075 != 1767808087417078 2026-01-07T17:48:07.533439Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:07.533527Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:07.537387Z node 9 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:07.615627Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.748444Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:07.765530Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.825908Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.874561Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.608686Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677939143806177:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:11.608769Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:11.627184Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.694312Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:11.743358Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:11.743460Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:11.748706Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:11.839163Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.979633Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.002351Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.057494Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.107524Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPartitionTests::OldPlanStep >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQTabletTests::ProposeTx_Unknown_Partition_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] Test command err: 2026-01-07T17:48:14.593727Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:14.672844Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:14.678048Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:14.678370Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:14.678434Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:14.678477Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:14.678537Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:14.678613Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.678672Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:14.710409Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2026-01-07T17:48:14.710870Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:14.730392Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:14.733274Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:14.733422Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.734302Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:14.734463Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:14.734848Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:14.735291Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2026-01-07T17:48:14.736235Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:14.736297Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:14.736339Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2026-01-07T17:48:14.736398Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:14.736463Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:14.737025Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:14.737072Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:14.737109Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:14.737150Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:14.737194Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:14.737249Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:14.737314Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:14.737350Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:14.737402Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:14.737432Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:14.737466Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:14.737677Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:14.737744Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:14.737916Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:14.738144Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:14.740683Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:14.740791Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:14.740846Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:14.740897Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:14.740968Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:14.741001Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:14.741031Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:14.741078Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:14.741439Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2026-01-07T17:48:14.742082Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2026-01-07T17:48:14.742866Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-07T17:48:14.742951Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-07T17:48:14.743055Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-07T17:48:14.743104Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-07T17:48:14.743152Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-07T17:48:14.743193Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-07T17:48:14.743245Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-07T17:48:14.743290Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-07T17:48:14.743460Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 181 RawX2: 
4294969490 } Partitions { } 2026-01-07T17:48:14.743543Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:14.745772Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:14.745831Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-07T17:48:14.745871Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-07T17:48:14.745906Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-07T17:48:14.749454Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 181 RawX2: 4294969490 } } St ... 01-07T17:48:16.966081Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARED 2026-01-07T17:48:16.966127Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARED 2026-01-07T17:48:16.966172Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARED to PLANNING 2026-01-07T17:48:16.966223Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2026-01-07T17:48:16.966275Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4471: [PQ: 72057594037927937] TxQueue.size 1 2026-01-07T17:48:16.966317Z node 6 :PQ_TX INFO: pq_impl.cpp:650: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2026-01-07T17:48:16.966391Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2026-01-07T17:48:16.966441Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3791: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2026-01-07T17:48:16.966519Z node 6 :PERSQUEUE DEBUG: partition.cpp:1381: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2026-01-07T17:48:16.966578Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:16.966626Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:48:16.966706Z node 6 :PQ_TX DEBUG: partition.cpp:3048: [Partition][0][StateIdle] TxId 67890 affect consumer user 2026-01-07T17:48:16.966763Z node 6 :PQ_TX DEBUG: partition.cpp:1708: [Partition][0][StateIdle] The long answer to TEvTxCalcPredicate. TxId: 67890 2026-01-07T17:48:16.966817Z node 6 :PQ_TX DEBUG: partition.cpp:1711: [Partition][0][StateIdle] Send TEvTxCalcPredicateResult. 
TxId: 67890 2026-01-07T17:48:16.966883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:16.966930Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.966981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:16.967024Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:16.967254Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-07T17:48:16.967303Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-07T17:48:16.967343Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-07T17:48:16.967396Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-07T17:48:16.967446Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-07T17:48:16.967492Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-07T17:48:16.967540Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:48:16.967583Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-07T17:48:16.967631Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-07T17:48:16.967686Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-07T17:48:16.967739Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 tx 67890 2026-01-07T17:48:16.967847Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-07T17:48:16.967930Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:16.969030Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-07T17:48:16.969088Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 22222 2026-01-07T17:48:16.974175Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:241:2233], now have 1 active actors on pipe 2026-01-07T17:48:16.974831Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:242:2234], now have 1 active actors on pipe 2026-01-07T17:48:16.974970Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\002" Seqno: 0 2026-01-07T17:48:16.975022Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22222 2026-01-07T17:48:16.975064Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-07T17:48:16.975114Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-07T17:48:16.975163Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-07T17:48:16.975206Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-07T17:48:16.975253Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-07T17:48:16.975300Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-07T17:48:16.975388Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-07T17:48:16.975443Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-07T17:48:16.975511Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 22222 2026-01-07T17:48:16.975658Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-07T17:48:16.975707Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:16.975968Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:16.976049Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:16.976104Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:16.976157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:16.976202Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.976257Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:16.976333Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-07T17:48:16.976378Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:16.976421Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:16.976468Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:16.976642Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:16.976741Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:16.979120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:16.979245Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:16.979291Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:16.979324Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.979356Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.979388Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.979424Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:16.979492Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:16.979571Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-07T17:48:16.979632Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:48:16.979679Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:48:16.979728Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:48:16.979781Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:48:16.979832Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-07T17:48:16.979881Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-07T17:48:16.979939Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-07T17:48:16.980019Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-07T17:48:16.980082Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:16.980132Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-07T17:48:16.980178Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-07T17:48:16.980226Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 2026-01-07T17:48:16.980266Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:48:16.980303Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::UpdateConfig_1 >> 
TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMeteringSink::UnusedStorageV1 [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPQTabletTests::UpdateConfig_2 >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPartitionTests::IncorrectRange |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMultiBucketCounter::InsertAndUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2026-01-07T17:47:37.394201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677794318105531:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:37.394301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:37.889078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.891837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:37.891912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:37.964480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:38.005903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:38.169042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:38.242679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:38.252285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:38.265286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:38.342942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.401439Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:38.483919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:38.533711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.274741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.291374Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:41.313726Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:41.317192Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677809285591327:2081] 1767808061127169 != 1767808061127172 2026-01-07T17:47:41.337814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:41.337896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:41.362015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:41.540345Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:41.618973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:41.638134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:41.650227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.732289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:41.820132Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.690107Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677823486168543:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:44.690161Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:44.721151Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.777960Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:44.779465Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677823486168516:2081] 1767808064688888 != 1767808064688891 2026-01-07T17:47:44.829493Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:44.829556Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:44.832879Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.010647Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:45.017100Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.024712Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:45.032573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:45.037366Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.097344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.172796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.225580Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677840378114339:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:48.225803Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:48.251299Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.360162Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677840378114196:2081] 1767808068218802 != 1767808068218805 2026-01-07T17:47:48.369768Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:48.369858Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:48.378115Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:48.390515Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:48.482193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables e ... 
ileState: Disconnected -> Connecting 2026-01-07T17:47:59.780686Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:59.928148Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.994963Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:00.002422Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:00.017607Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:48:00.029172Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.090600Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.192862Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.750020Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677904658807121:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:03.750091Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.767160Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.869485Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.872229Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677904658807092:2081] 1767808083749098 != 1767808083749101 2026-01-07T17:48:03.887231Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.887328Z node 8 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.892476Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:04.067345Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.131917Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:04.150338Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:48:04.156147Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.217999Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.268368Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.047134Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677924885048794:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:08.047237Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:08.059817Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.148304Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:08.150421Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677924885048767:2081] 1767808088046137 != 1767808088046140 2026-01-07T17:48:08.190742Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:08.190859Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:08.202786Z node 9 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:08.241787Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.466722Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:08.487376Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.589291Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.638310Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.286901Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677942844470986:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:12.286960Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:12.301248Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.376297Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.420455Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:12.420557Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:12.422617Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:12.542093Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.631020Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.651322Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.704712Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.798983Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionTests::CorrectRange_Rollback >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TSentinelTests::PDiskFaultyGuard [GOOD] >> TSentinelTests::PDiskFaultyGuardWithForced >> TPQTabletTests::ReadQuoter_ExclusiveLock >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2026-01-07T17:47:36.599912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677788145473941:2180];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:36.600144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:36.896168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:36.899098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:36.899212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:36.967476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:37.011755Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:37.017141Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677788145473790:2081] 
1767808056567612 != 1767808056567615 2026-01-07T17:47:37.072821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:37.204529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:37.209945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:37.290243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.452079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.519419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:37.558297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:40.037546Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677803972878494:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:40.037617Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:40.061455Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:40.131255Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:40.132912Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677803972878377:2081] 1767808060033803 != 1767808060033806 2026-01-07T17:47:40.160252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:40.160335Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:40.170102Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:40.242129Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:40.363039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:40.383679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:40.461289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:40.514494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:43.718381Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677816807743977:2248];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:43.718493Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:43.734312Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:43.787631Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:43.788829Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677816807743749:2081] 1767808063712846 != 1767808063712849 2026-01-07T17:47:43.825340Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:43.825425Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:43.830401Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:44.004209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:44.018019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:44.023072Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.053946Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:44.076119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:44.118078Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.072532Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677837579844769:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:47.072661Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:47.093204Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.219195Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:47.245074Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:47.245159Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:47.256501Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.326079Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.473676Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first ... 
adata/script_executions 2026-01-07T17:47:59.316785Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.361619Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:59.361720Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:59.366001Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:59.422511Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.607494Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:59.624380Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:59.629865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:59.702724Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:59.772528Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.499890Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677903337331639:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:03.500010Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.517645Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.602107Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.643570Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.643666Z node 8 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.651947Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:03.760399Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.895650Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:03.916061Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:48:03.925511Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.996420Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.046693Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.993797Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677922686109540:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:07.993858Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:08.014398Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.090670Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:08.128860Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:08.128985Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:08.137760Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:08.222953Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.387815Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:08.412167Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.473354Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.524087Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.459507Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677941809209592:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:12.459568Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:12.473995Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.554011Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.562294Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677941809209564:2081] 1767808092458602 != 1767808092458605 2026-01-07T17:48:12.584831Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:12.584969Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:12.605689Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:12.771524Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.855896Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.876377Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.937171Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.989023Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPartitionTests::IncorrectRange [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2026-01-07T17:42:32.210389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:32.210434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:32.409899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:33.435187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:42:33.643120Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.643635Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-01-07T17:42:33.644258Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12423268679096612232 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.740898Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.741416Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.741685Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11481380516942953513 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.744750Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-07T17:42:33.788337Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.788845Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.789173Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5238637848760970896 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:42:33.844851Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:42:33.845378Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:42:33.845668Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0018bf/r3tmp/tmp6o2I9E/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2951574327339373713 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceI ... 2026-01-07T17:47:05.575635Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:05.575990Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:11.810083Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:11.810485Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:18.100385Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:18.100725Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool 
/dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:18.227754Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:18.228228Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:24.171978Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:24.172560Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:30.384905Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:30.385307Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:37.117230Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:37.117600Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:43.558549Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:43.558935Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:50.180198Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:50.180590Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:56.696093Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:47:56.696426Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:03.140435Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create 
pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:03.140771Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:09.534463Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:09.534701Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:16.000222Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-07T17:48:16.000569Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> PQCountersSimple::PartitionLevelMetrics [GOOD] >> PQCountersSimple::PartitionWriteQuota >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> 
TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet >> TPQTest::DirectReadBadSessionOrPipe >> TPartitionTests::NonConflictingCommitsBatch >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile >> TPartitionTests::TestNonConflictingActsBatchOk >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::LocksBrokenStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] Test command err: 2026-01-07T17:48:11.639393Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:48:11.639470Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:48:11.639545Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:11.639575Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:48:11.639631Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:48:11.639707Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:48:11.643104Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { 
Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:48:11.648272Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: 
"/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
oupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1000 } } Group { GroupId: 13 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1001 } } Group { GroupId: 14 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1002 } } Group { GroupId: 15 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1003 } } Group { GroupId: 16 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1000 } } Group { GroupId: 17 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1001 } } Group { GroupId: 18 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1002 } } Group { GroupId: 19 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1003 } } Group { GroupId: 20 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1000 } } Group { GroupId: 21 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1001 } } Group { GroupId: 22 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1002 } } Group { GroupId: 23 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1003 } } Group { GroupId: 24 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1000 } } Group { GroupId: 25 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1001 } } Group { GroupId: 26 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1002 } } Group { GroupId: 27 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1003 } } Group { GroupId: 28 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1000 } } Group { GroupId: 29 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1001 } } Group { GroupId: 30 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1002 } } Group { GroupId: 31 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1003 } } Group { GroupId: 32 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1000 } } Group { GroupId: 33 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1001 } } Group { GroupId: 34 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1002 } } Group { GroupId: 35 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1003 } } Group { GroupId: 36 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1000 } } Group { GroupId: 37 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1001 } } Group { GroupId: 38 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1002 } } Group { GroupId: 39 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1003 } } Group { GroupId: 40 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1000 } } Group { GroupId: 41 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1001 } } Group { GroupId: 42 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1002 } } Group { GroupId: 43 GroupGeneration: 1 
ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1003 } } Group { GroupId: 44 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1000 } } Group { GroupId: 45 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1001 } } Group { GroupId: 46 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1002 } } Group { GroupId: 47 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1003 } } Group { GroupId: 48 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1000 } } Group { GroupId: 49 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1001 } } Group { GroupId: 50 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1002 } } Group { GroupId: 51 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1003 } } Group { GroupId: 52 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1000 } } Group { GroupId: 53 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1001 } } Group { GroupId: 54 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1002 } } Group { GroupId: 55 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1003 } } Group { GroupId: 56 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1000 } } Group { GroupId: 57 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1001 } } Group { GroupId: 58 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1002 } } Group { GroupId: 59 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1003 } } Group { GroupId: 60 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1000 } } Group { GroupId: 61 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1001 } } Group { GroupId: 62 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1002 } } Group { GroupId: 63 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1003 } } Group { GroupId: 64 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1000 } } Group { GroupId: 65 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1001 } } Group { GroupId: 66 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1002 } } Group { GroupId: 67 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1003 } } Group { GroupId: 68 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1000 } } Group { GroupId: 69 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1001 } } Group { GroupId: 70 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1002 } } Group { GroupId: 71 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1003 } } Group { GroupId: 72 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1000 } } Group { GroupId: 73 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1001 } } Group { GroupId: 74 GroupGeneration: 1 ErasureSpecies: 
"none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1002 } } Group { GroupId: 75 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1003 } } Group { GroupId: 76 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1000 } } Group { GroupId: 77 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1001 } } Group { GroupId: 78 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1002 } } Group { GroupId: 79 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1003 } } } } Success: true 2026-01-07T17:48:19.065583Z node 13 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.000000s 2026-01-07T17:48:19.065622Z node 13 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:48:19.065739Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:48:19.065787Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:48:19.066179Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 132 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-132.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 133 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-133.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 134 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-134.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 135 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-135.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 136 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-136.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 137 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-137.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 138 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-138.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 139 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-139.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:48:19.066480Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 140 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-140.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 141 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-141.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 142 CreateTime: 0 ChangeTime: 0 Path: 
"/14/pdisk-142.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 143 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-143.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 144 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-144.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 145 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-145.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 146 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-146.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 147 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-147.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 148 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-148.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 149 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-149.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-07T17:48:19.066547Z node 13 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] Test command err: 2026-01-07T17:48:17.867686Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.938703Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:17.942719Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:17.943128Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:17.943194Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:17.943227Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:17.943311Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:17.943375Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.943437Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:17.962445Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-07T17:48:17.962620Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:17.983210Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:17.986500Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:17.986662Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.988340Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:17.988527Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:17.988622Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:17.989371Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:17.989840Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:17.990892Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:17.990952Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:17.990996Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:17.991051Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:17.991122Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:17.991693Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:17.991744Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:17.991828Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.991897Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:17.991958Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:17.992013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.992110Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:17.992158Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:17.992202Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:17.992243Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:17.992285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:17.992544Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:17.992637Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:17.992842Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:17.993106Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2026-01-07T17:48:17.993896Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:17.993940Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-07T17:48:17.993976Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2026-01-07T17:48:17.994018Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:17.994063Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:17.994465Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:17.994502Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:17.994532Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.994576Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:17.994603Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:17.994636Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.994684Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-07T17:48:17.994727Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:48:17.994771Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:17.994804Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:17.994834Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:17.995041Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:17.995108Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:17.995279Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:17.995546Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:17.995804Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:17.995948Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:48:18.001425Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.099699Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:20.099729Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.099948Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-07T17:48:20.099983Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-07T17:48:20.100021Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-07T17:48:20.100062Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-07T17:48:20.100092Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-07T17:48:20.100122Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-07T17:48:20.100153Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:48:20.100182Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-07T17:48:20.100218Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-07T17:48:20.100253Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-07T17:48:20.100290Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 72057594037950158 tx 67890 2026-01-07T17:48:20.100364Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-07T17:48:20.100447Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:20.101155Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-07T17:48:20.101205Z node 6 :PQ_TX INFO: pq_impl.cpp:2842: [PQ: 72057594037927937] Assume tablet 72057594037950158 dead, sending read set acks for tx 67890 2026-01-07T17:48:20.101234Z node 6 :PQ_TX DEBUG: transaction.cpp:358: [TxId: 67890] Predicate acks 1/1 2026-01-07T17:48:20.101288Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-07T17:48:20.101319Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-07T17:48:20.101351Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-07T17:48:20.101394Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-07T17:48:20.101427Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-07T17:48:20.101487Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-07T17:48:20.101519Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 72057594037950158 2026-01-07T17:48:20.101598Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:238:2230], now have 1 active actors on pipe 2026-01-07T17:48:20.101693Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 72057594037950158 TabletDest: 72057594037927937 TabletProducer: 72057594037950158 ReadSet: "\010\001" Seqno: 0 2026-01-07T17:48:20.101728Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 72057594037950158 2026-01-07T17:48:20.101758Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-07T17:48:20.101790Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-07T17:48:20.101817Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-07T17:48:20.101845Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-07T17:48:20.101876Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-07T17:48:20.101905Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-07T17:48:20.101967Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-07T17:48:20.102003Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-07T17:48:20.102132Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 72057594037950158 2026-01-07T17:48:20.102366Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-07T17:48:20.102403Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:20.103467Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:20.103556Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:20.103597Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.103626Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:20.103654Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.103715Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:20.103777Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-07T17:48:20.103807Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-07T17:48:20.103837Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:20.103866Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:20.103899Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.104092Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:20.104145Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:20.105547Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:20.105631Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:20.105663Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.105686Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.105708Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.105732Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.105753Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.105782Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:20.105826Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-07T17:48:20.105860Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:48:20.105892Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:48:20.105923Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:48:20.105961Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:48:20.105992Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-07T17:48:20.106022Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-07T17:48:20.106057Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-07T17:48:20.106106Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-07T17:48:20.106147Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 72057594037950158 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:20.106182Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-07T17:48:20.106212Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-07T17:48:20.106243Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-07T17:48:20.106266Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:48:20.106297Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-07T17:48:20.106327Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for 
deletion 2026-01-07T17:48:20.106358Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-07T17:48:20.106392Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-07T17:48:20.106462Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:20.107673Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:20.107724Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-07T17:48:20.107758Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-07T17:48:20.107788Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetPartitionWriteInfoError >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPartitionTests::ConflictingCommitFails >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> TPartitionTests::GetPartitionWriteInfoError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] Test command err: 2026-01-07T17:48:18.106841Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.159343Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:18.162238Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:18.162464Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:18.162522Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:18.162591Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:18.162638Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:18.162674Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:18.162732Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:18.175320Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-07T17:48:18.175420Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:18.190114Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:18.192335Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:18.192434Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:18.193582Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:18.193725Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:18.193781Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:18.194165Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:18.194471Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:18.195119Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:18.195153Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:18.195181Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:18.195247Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:18.195291Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:18.195654Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:18.195684Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:18.195711Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:18.195760Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:18.195790Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:18.195829Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:18.195902Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:18.195936Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:18.195964Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:18.195996Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:18.196028Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:18.196195Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:18.196259Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:18.196379Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:18.196521Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2026-01-07T17:48:18.196995Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:18.197031Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-07T17:48:18.197053Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2026-01-07T17:48:18.197077Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:18.197103Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:18.197478Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:18.197512Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:18.197540Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:18.197570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:18.197596Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:18.197630Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:18.197662Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-07T17:48:18.197688Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:48:18.197715Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:18.197743Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:18.197766Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:18.197855Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:18.197901Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:18.198009Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:18.198128Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:18.198276Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:18.198379Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:48:18.201451Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
w have 1 active actors on pipe 2026-01-07T17:48:20.956205Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.956242Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.956284Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with empty session id 2026-01-07T17:48:20.956357Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 6, Error Read prepare request with empty session id 2026-01-07T17:48:20.956390Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with empty session id Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "bad-session" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:179:2192] 2026-01-07T17:48:20.956781Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:225:2225], now have 1 active actors on pipe 2026-01-07T17:48:20.956891Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.956944Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.956985Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id bad-session 2026-01-07T17:48:20.957060Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 7, Error Read prepare request with unknown(old?) session id bad-session 2026-01-07T17:48:20.957091Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id bad-session 2026-01-07T17:48:20.957374Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:228:2227], now have 1 active actors on pipe 2026-01-07T17:48:20.957465Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.957498Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.957554Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1702: [PQ: 72057594037927937] Got cmd delete session: ClientId: "user2" SessionId: "session2" 2026-01-07T17:48:20.957591Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72057594037927937] Destroy direct read session session2 2026-01-07T17:48:20.957658Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.957689Z node 8 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:20.957727Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:20.957757Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.957790Z node 8 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:20.957851Z node 8 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user2 session is set to 0 (startOffset 0) session session2 2026-01-07T17:48:20.957881Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:20.957910Z node 8 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:20.957943Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.958109Z node 8 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:48:20.960011Z node 8 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:20.960102Z node 8 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:20.960145Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.960176Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.960203Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.960234Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.960265Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.960305Z node 8 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:20.960368Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "session2" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:179:2192] 2026-01-07T17:48:20.960705Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:234:2232], now have 1 active actors on pipe 2026-01-07T17:48:20.960817Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.960853Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.960896Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2 2026-01-07T17:48:20.960992Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 9, Error Read prepare request with unknown(old?) session id session2 2026-01-07T17:48:20.961023Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id session2 Publish read Send publish read request: Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.961377Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:237:2234], now have 1 active actors on pipe 2026-01-07T17:48:20.961454Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.961490Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.961582Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.961652Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 10, Error Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.961688Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 214\n RawX2: 34359740585\n}\nCookie: 123\nCmdPublishRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } Forget read Send forget read request: Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.962066Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:239:2236], now have 1 active actors on pipe 2026-01-07T17:48:20.962121Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:20.962154Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:48:20.962239Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.962299Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 11, Error Read prepare request with unknown(old?) 
session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-07T17:48:20.962330Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 214\n RawX2: 34359740585\n}\nCookie: 123\nCmdForgetRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::FailedTxsDontBlock >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestSessionTimeoutAfterDetach >> TKesusTest::TestAcquireLocks >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TLocksTest::SetBreakSetEraseBreak [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TPartitionTests::GetUsedStorage >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> TLocksTest::Range_BrokenLock3 [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] Test command err: 2026-01-07T17:48:16.826199Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:16.891129Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:16.894703Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:16.894981Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:16.895037Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:16.895073Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:16.895123Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:16.895202Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:16.895258Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:16.912046Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-07T17:48:16.912198Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:16.936340Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:16.939121Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:16.939254Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:16.940190Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:16.940338Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:16.940732Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:16.941169Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:16.942130Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:16.942174Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:48:16.942210Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:16.942271Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:16.942356Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:16.942918Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:16.942961Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:16.942997Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:16.943048Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:16.943100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.943143Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:16.943222Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:16.943287Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:16.943319Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:16.943350Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:16.943387Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:16.943623Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:16.943693Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:16.943863Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:16.944059Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:16.946672Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:16.946775Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:16.946823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:16.946874Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.946913Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:16.946960Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:16.946999Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:16.947039Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:16.947394Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe 2026-01-07T17:48:16.947975Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2205], now have 1 active actors on pipe 2026-01-07T17:48:16.948820Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 2 Data { Immediate: false } 2026-01-07T17:48:16.948889Z node 1 :PQ_TX WARN: pq_impl.cpp:3283: [PQ: 72057594037927937] TxId 2 empty list of operations 2026-01-07T17:48:16.948999Z node 1 :PQ_TX INFO: pq_impl.cpp:4801: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-07T17:48:17.202091Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.244279Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:17.247540Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:17.247806Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:17.247849Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:17.247881Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:17.247917Z node 2 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:17.247958Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.248007Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:17.262937Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [2:181:2193], now have 1 active actors on pipe 2026-01-07T17:48:17.263093Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:17.263339Z node 2 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicNam ... persist 2026-01-07T17:48:22.359996Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.360072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.360102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.360182Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.360214Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.380721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.380777Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.380805Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.380836Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.380861Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.401400Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.401452Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.401480Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.401512Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.401548Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.422098Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.422157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.422183Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.422207Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.422226Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.443727Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.443779Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.443809Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.443839Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.443865Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.455021Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.475863Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.475917Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.475952Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.475981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.476008Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.496576Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.496641Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.496672Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.496707Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T17:48:22.496737Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.517314Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.517378Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.517407Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.517438Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.517464Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.538044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.538102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.538131Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.538165Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.538191Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.558741Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.569391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.569453Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.569483Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.569516Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.569550Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.590156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.590216Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.590242Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.590293Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.590321Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.610949Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx 
events 2026-01-07T17:48:22.611007Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.611034Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.611065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.611090Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.631706Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.631760Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.631787Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.631817Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.631842Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.652354Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.652425Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.652449Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.652471Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.652490Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.725144Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.725210Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.725238Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.725282Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.725309Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2026-01-07T17:47:57.674731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677879072850595:2218];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.674783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.733724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:58.014512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:58.014621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:58.032766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:58.075280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.122736Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677879072850414:2081] 1767808077646360 != 1767808077646363 2026-01-07T17:47:58.124285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:58.332696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:58.337005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:58.346922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:58.406997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.528626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.567628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:58.694783Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:01.153794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:01.154054Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.221538Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:01.221611Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:01.223344Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:01.232779Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677893641584147:2081] 1767808081094600 != 1767808081094603 2026-01-07T17:48:01.258388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:01.357040Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:01.493462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:01.515115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.583295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:01.631041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.467592Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677906990992605:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:04.467666Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:04.480499Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.537020Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:04.539007Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677906990992580:2081] 1767808084467177 != 1767808084467180 2026-01-07T17:48:04.577594Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:04.577674Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:04.582561Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:04.724516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:04.739757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.778230Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:04.790790Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:04.829905Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.862971Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677922709926403:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:07.863033Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:07.874538Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.941513Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:07.981461Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:07.981545Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:07.989736Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:08.166381Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.195002Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:08.213768Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.264293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.304364Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.213789Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592677939747775563:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:11.213846Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:11.269602Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.301624Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:11.342554Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:11.342648Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:11.349926Z node 5 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:11.554227Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:11.558670Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.572767Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.618166Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.663095Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:14.581480Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592677952697155667:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:14.581681Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:14.599543Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:14.691230Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592677952697155636:2081] 1767808094579161 != 1767808094579164 2026-01-07T17:48:14.702724Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:14.712367Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:14.712469Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:14.742279Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:14.794818Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:48:14.993189Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:15.011042Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:15.076820Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:15.133296Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.351083Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592677968740654419:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:18.351190Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:18.365530Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:18.456075Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:18.498808Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:18.498913Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.506215Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.538531Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:18.722745Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:18.741569Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.795893Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.845360Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TKesusTest::TestUnregisterProxy >> TPartitionTests::GetUsedStorage [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2026-01-07T17:48:12.254850Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:12.309883Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:12.309942Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:12.309992Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:12.310038Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:12.327019Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:12.344631Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:205:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:12.345540Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:213:2166] 2026-01-07T17:48:12.347099Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 
generation 2 [1:213:2166] 2026-01-07T17:48:12.348824Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:214:2166] 2026-01-07T17:48:12.349861Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:214:2166] 2026-01-07T17:48:12.356211Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:12.356557Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2c5fd3dd-40937409-d29c3a0b-841ff4fd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:12.362109Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:12.362363Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9561b7e1-3eea1b07-dd05975d-e15057d7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:12.367681Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:12.367942Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e57f3938-40f51ef0-90bccf12-436d8d5e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:12.372644Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:12.372911Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ca4565a1-ece2e992-f86b3445-9df84470_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:12.374332Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:12.374606Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4c414ad5-84291900-bd5138e-8d938b3e_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 ACTUAL:
subsystem=SLI:
    Account=asdfgs:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    Account=total:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    sensor=Write:
        Account=asdfgs:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
subsystem=partitionWriteQuotaWait:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
subsystem=readSession:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
subsystem=readTimeLag:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=TimeLags:
                                    Interval=10000ms: 0
                                    Interval=1000ms: 0
                                    Interval=100ms: 0
                                    Interval=10ms: 0
                                    Interval=180000ms: 0
                                    Interval=2000ms: 0
                                    Interval=200ms: 0
                                    Interval=20ms: 0
                                    Interval=30000ms: 0
                                    Interval=5000ms: 0
                                    Interval=500ms: 0
                                    Interval=50ms: 0
                                    Interval=60000ms: 0
                                    Interval=9999999ms: 0
                    OriginDC=cluster:
                        Client=user:

...
[15:205:2218] txId 12345 config:
CacheSize: 10485760
PartitionConfig {
  MaxCountInPartition: 20000000
  MaxSizeInPartition: 104857600
  LifetimeSeconds: 0
  LowWatermark: 6291456
  SourceIdLifetimeSeconds: 3600
  MaxWriteInflightSize: 90000000
}
PartitionIds: 0
PartitionIds: 1
TopicName: "topic"
Version: 39
LocalDC: true
Topic: "topic"
TopicPath: "/topic"
YcCloudId: "somecloud"
YcFolderId: "somefolder"
YdbDatabaseId: "PQ"
YdbDatabasePath: "/Root/PQ"
Partitions {
  PartitionId: 0
}
Partitions {
  PartitionId: 1
}
FederationAccount: "federationAccount"
MeteringMode: METERING_MODE_RESERVED_CAPACITY
AllPartitions {
  PartitionId: 0
}
AllPartitions {
  PartitionId: 1
}
Consumers {
  Name: "user"
  ReadFromTimestampsMs: 0
  Generation: 39
}
2026-01-07T17:48:21.357316Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [15:213:2166]
2026-01-07T17:48:21.358097Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [15:213:2166]
2026-01-07T17:48:21.358830Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [15:214:2166]
2026-01-07T17:48:21.359373Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [15:214:2166]
2026-01-07T17:48:21.363604Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId:  error: new GetOwnership request needed for owner 
2026-01-07T17:48:21.363901Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a039c1b1-97681db7-30dcc37f-34458f87_0 generated for partition 0 topic 'topic' owner default
2026-01-07T17:48:21.369201Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-07T17:48:21.369510Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2c88fd17-a220501e-eeb194fd-9e2e60e5_1 generated for partition 0 topic 'topic' owner default
2026-01-07T17:48:21.374439Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-07T17:48:21.374752Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8bafd8e-a51c9ec5-43d8382b-5e5fbc1b_2 generated for partition 0 topic 'topic' owner default
2026-01-07T17:48:21.380119Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-07T17:48:21.380449Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ce2ed442-112485fe-bb73672e-ec63ea8_3 generated for partition 0 topic 'topic' owner default
Got start offset = 0
COUNTERS: 
subsystem=SLI:
    Account=federationAccount:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    Account=total:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    sensor=Write:
        Account=federationAccount:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
2026-01-07T17:48:21.668812Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.722857Z node 16 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:21.722928Z node 16 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:21.722987Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:21.723055Z node 16 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:21.743653Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:21.744378Z node 16 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 40 actor [16:206:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 40 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 40 } 2026-01-07T17:48:21.745048Z node 16 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [16:215:2166] 2026-01-07T17:48:21.748546Z node 16 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [16:215:2166] 2026-01-07T17:48:21.754461Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:21.754880Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3a266042-4d3c0283-7b928aad-b4ab682b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 2026-01-07T17:48:21.764225Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:21.764579Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f9ac459c-85b297d-919c44a4-d538f63c_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:21.775687Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.806751Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.827580Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.858625Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: 
No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.889625Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.910447Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:21.972238Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.033988Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 2026-01-07T17:48:22.113080Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:22.113380Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8ceef1c3-def63102-624c5b68-ed959367_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:22.175545Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.350484Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 2026-01-07T17:48:22.389759Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:22.390135Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5d7718f0-6342e1c-76e90f3-1b1e9e72_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:22.442266Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.627687Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 2026-01-07T17:48:22.639688Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:22.640027Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|13d801d2-cd0d96d1-6918f611-e7bd4ce2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 2026-01-07T17:48:22.884424Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:22.884778Z node 
16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ba243f2b-2430f9b0-563ebe8-4a10d578_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:22.947779Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:22.958369Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:231:2166] Captured TEvRequest, cmd write size: 4 Got start offset = 0 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> TLocksTest::Range_EmptyKey [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> DataShardTxOrder::DelayData [GOOD] >> DataShardOutOfOrder::LocksBrokenStats [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks [GOOD] >> TSentinelBaseTests::GuardianRackRatio [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2026-01-07T17:48:16.826271Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:16.892823Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:16.892908Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:16.893012Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:16.893079Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:16.917414Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:183:2196] 2026-01-07T17:48:16.919144Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:16.000000Z 2026-01-07T17:48:16.919394Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:183:2196] 2026-01-07T17:48:16.940638Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:16.985155Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.006200Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.016790Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.061186Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.102709Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.133719Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\256\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:17.288176Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\256\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:17.310272Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\256\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:17.353390Z node 1 :PERSQUEUE WARN: partition.cpp:3781: 
[72057594037927937][Partition][0][StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\256\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:17.843241Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.877551Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:17.877597Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:17.877633Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.877672Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:17.890213Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:182:2194] 2026-01-07T17:48:17.891714Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:48:17.000000Z 2026-01-07T17:48:17.891907Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:182:2194] 2026-01-07T17:48:17.902469Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.933542Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.954339Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:17.987273Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.018207Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.049206Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.121011Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.151892Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.615209Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for 
SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.656597Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:18.656635Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:18.656674Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:18.656716Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:18.669542Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [3:181:2194] 2026-01-07T17:48:18.671212Z node 3 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:48:18.000000Z 2026-01-07T17:48:18.671427Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:181:2194] 2026-01-07T17:48:18.692441Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.733611Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.754412Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.764900Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.807342Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.848572Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.869302Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:19.014807Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.035497Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp ... 
:PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:48:22.865673Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:22.865713Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:48:22.865752Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:48:22.865779Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:22.865801Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:22.865829Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-07T17:48:22.865860Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.865896Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-07T17:48:22.865923Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Wait kv request Wait kv request 2026-01-07T17:48:22.866311Z node 5 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 1, TxId 0 2026-01-07T17:48:22.866349Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:22.866387Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-07T17:48:22.866420Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:22.866450Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2026-01-07T17:48:22.866473Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:22.866501Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:22.886992Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:22.887055Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-07T17:48:22.887124Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:22.887166Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.887196Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-07T17:48:22.887227Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.887289Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:22.887329Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-07T17:48:22.887379Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:22.887454Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2026-01-07T17:48:22.887500Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:22.887542Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 3 2026-01-07T17:48:22.887599Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-07T17:48:22.887640Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:22.887667Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 4 2026-01-07T17:48:22.887686Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 4 2026-01-07T17:48:22.887709Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:22.887739Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2026-01-07T17:48:22.887763Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-07T17:48:22.902759Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2026-01-07T17:48:22.902844Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 6 2026-01-07T17:48:22.902879Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] 
Batch completed (6) 2026-01-07T17:48:22.902946Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.903179Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 6 Got KV request Got KV request Wait tx rollback for tx 0 2026-01-07T17:48:22.903373Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:22.923949Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:22.924030Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:22.924206Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:22.924254Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:22.924292Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.924327Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:22.924363Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:22.924398Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:22.924440Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait tx committed for tx 3 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2026-01-07T17:48:23.154933Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.199113Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:23.199179Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:23.199226Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:23.199278Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:23.215900Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [6:184:2196] 2026-01-07T17:48:23.217830Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:23.000000Z 2026-01-07T17:48:23.217900Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [6:184:2196] 2026-01-07T17:48:23.228464Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.259520Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.280274Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.311329Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.342419Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.363213Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.414702Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:23.486572Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> TPartitionTests::DataTxCalcPredicateOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2026-01-07T17:47:45.771099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677827625747058:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.771200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.975243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.975312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.975323Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.977123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:46.073067Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677827625747031:2081] 1767808065769290 != 1767808065769293 2026-01-07T17:47:46.075118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.237610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.282856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:46.289738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:46.388054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.517868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.563944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.015059Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677845614484688:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.015591Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:49.026759Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:49.101554Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:49.103730Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677845614484557:2081] 1767808069011812 != 1767808069011815 2026-01-07T17:47:49.112512Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.112783Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:49.112850Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:49.138868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:49.315043Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:49.371465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:49.381423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:49.393474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.461704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.506586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.532461Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677857016653769:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:52.532509Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:52.555797Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:52.645988Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:52.656501Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:47:52.656582Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:52.680723Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:52.806818Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:52.879058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:52.901273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.963784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.008640Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.886066Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677870105718921:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:55.886112Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:55.902041Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:55.961403Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.997841Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.997919Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.002651Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:56.118374Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:47:56.200061Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.2042 ... State: Unknown -> Disconnected 2026-01-07T17:48:06.900683Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:06.903436Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:07.027529Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.145687Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:07.164081Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.222099Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.272332Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.138128Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677939610712274:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:11.138183Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:11.152307Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.224763Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:11.273068Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:11.273177Z node 8 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:11.276573Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:11.377256Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.483465Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:11.503750Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.558800Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.608320Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:14.951739Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677951598039687:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:14.951791Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:14.964173Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:15.068231Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:15.070448Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677951598039654:2081] 1767808094950516 != 1767808094950519 2026-01-07T17:48:15.084358Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:15.085799Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:15.085919Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:48:15.088583Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:15.270972Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:15.338984Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:15.354506Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:48:15.360296Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:15.431002Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:15.487088Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:19.300243Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677971461575948:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:19.300304Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:19.322579Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:19.388079Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:19.389571Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677971461575921:2081] 1767808099299544 != 1767808099299547 2026-01-07T17:48:19.424227Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:19.424328Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:19.430428Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-07T17:48:19.509108Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:19.628637Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:19.644555Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:19.699646Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:19.751371Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TPartitionTests::DataTxCalcPredicateError |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::GuardianRackRatio [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TKesusTest::TestRegisterProxy >> TKesusTest::TestQuoterAccountResourcesBurst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2026-01-07T17:48:13.334866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:13.334941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:13.335390Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:13.346361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:13.346695Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:48:13.346930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: 
TxInitSchema.Execute 2026-01-07T17:48:13.356503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:13.397746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:13.398212Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:13.399493Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:48:13.399546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:48:13.399579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:48:13.399809Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:13.399948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:13.400012Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:48:13.467729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:13.499701Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:13.499890Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:13.499990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:48:13.500038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:13.500074Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:13.500104Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:13.500245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.500299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.500571Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:13.500676Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:13.500797Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:13.500832Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:13.500874Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:13.500904Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:13.500963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:13.500996Z node 1 :TX_DATASHARD 
INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:13.501031Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:13.501107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.501154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.501212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:48:13.510893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:13.510984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:13.511083Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:13.511258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:13.511326Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:13.511377Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:13.511423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:13.511464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:13.511494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:13.511522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:13.511778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:13.511809Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:13.511850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:13.511883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:13.511946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:13.511972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:13.511998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] 
at 9437184 to execution unit WaitForPlan 2026-01-07T17:48:13.512026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:13.512053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:13.523948Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:13.524013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:13.524040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:13.524093Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:13.524144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:13.524590Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.524641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.524680Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:48:13.524806Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:13.524834Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:13.524972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:13.525008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:13.525040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:13.525069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:13.528517Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:13.528572Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:13.528732Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.528774Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.528818Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:13.528851Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:13.528883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:13.528941Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:13.528978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... plan for [1000005:506] at 9437184 has finished 2026-01-07T17:48:23.828117Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:23.828148Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:23.828176Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2026-01-07T17:48:23.828444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:240:2232], Recipient [1:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:23.828478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:23.828506Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:23.828525Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:23.828559Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2026-01-07T17:48:23.828586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2026-01-07T17:48:23.828605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.828621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2026-01-07T17:48:23.828640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2026-01-07T17:48:23.828655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2026-01-07T17:48:23.829019Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2026-01-07T17:48:23.829051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2026-01-07T17:48:23.829093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-07T17:48:23.829107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2026-01-07T17:48:23.829126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: 
Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-07T17:48:23.829149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:23.829160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:48:23.829187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:507] is the new logically complete end at 9437184 2026-01-07T17:48:23.829202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2026-01-07T17:48:23.829217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2026-01-07T17:48:23.829246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:23.829294Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2026-01-07T17:48:23.829306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2026-01-07T17:48:23.829337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2026-01-07T17:48:23.829370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2026-01-07T17:48:23.829381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2026-01-07T17:48:23.829395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2026-01-07T17:48:23.829419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2026-01-07T17:48:23.829430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2026-01-07T17:48:23.829465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829479Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2026-01-07T17:48:23.829493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2026-01-07T17:48:23.829505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2026-01-07T17:48:23.829516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: 
Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2026-01-07T17:48:23.829537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:48:23.829549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2026-01-07T17:48:23.829561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.829572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:48:23.829595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2026-01-07T17:48:23.829617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2026-01-07T17:48:23.829822Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2026-01-07T17:48:23.829853Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:48:23.829879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:23.829895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:48:23.829921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2026-01-07T17:48:23.829944Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2026-01-07T17:48:23.830026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is DelayComplete 2026-01-07T17:48:23.830041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2026-01-07T17:48:23.830075Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2026-01-07T17:48:23.830096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2026-01-07T17:48:23.830116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-07T17:48:23.830128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2026-01-07T17:48:23.830141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:507] at 9437184 has finished 2026-01-07T17:48:23.830168Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:23.830193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 
2026-01-07T17:48:23.830216Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:23.830241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:23.843565Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2026-01-07T17:48:23.843621Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-07T17:48:23.843688Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:23.843731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2026-01-07T17:48:23.843787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:23.843834Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:23.843963Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:23.843979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2026-01-07T17:48:23.844007Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:23.844037Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TLocksTest::BrokenSameShardLock [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> DqUnboxedValueToNativeArrowConversion::Struct [GOOD] >> DqUnboxedValueToNativeArrowConversion::Tuple >> DqOutputWideChannelTests::SingleRead [GOOD] >> DqOutputWideChannelTests::PartialRead [GOOD] >> DqOutputWideChannelTests::PopAll [GOOD] >> DqOutputWideChannelWithStorageTests::Spill [GOOD] >> DqOutputWideChannelWithStorageTests::Overflow [GOOD] >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:48:17.544424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:17.643088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:17.666341Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:17.666914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:17.666979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:17.951849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:17.952005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.016345Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808095216055 != 1767808095216059 2026-01-07T17:48:18.027325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:18.071602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.158658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:18.446035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:18.459350Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:18.566863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.599779Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:18.600792Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:18.601118Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:48:18.601380Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:18.636155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:18.636723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:18.636823Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:18.638081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:48:18.638145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:48:18.638195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:48:18.638438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:18.638537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:18.638595Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:48:18.649332Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:18.688376Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:48:18.688542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:18.688617Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:48:18.688641Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:48:18.688668Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:48:18.688700Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:18.688865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:18.688913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:18.689161Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:48:18.689257Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:48:18.689353Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:48:18.689379Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:18.689414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:48:18.689438Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:48:18.689463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:48:18.689488Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:48:18.689532Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:48:18.689618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:18.689653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:18.689708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:48:18.689991Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:48:18.690028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:18.690121Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:48:18.690271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:48:18.690320Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:48:18.690389Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:48:18.690430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:48:18.690458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:48:18.690486Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:48:18.690507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:48:18.690721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:18.690747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:48:18.690782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:48:18.690805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:48:18.690845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:48:18.690864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:48:18.690895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:48:18.690927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:48:18.690950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:18.691961Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:48:18.691992Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:48:18.702828Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 419Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-07T17:48:24.048439Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2026-01-07T17:48:24.048457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:24.048473Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:24.048497Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-07T17:48:24.048526Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715664] at 72075186224037888 2026-01-07T17:48:24.048547Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-07T17:48:24.048560Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:24.048574Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:48:24.048586Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:48:24.048600Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-07T17:48:24.048614Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:48:24.048627Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-07T17:48:24.048639Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-07T17:48:24.048669Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-07T17:48:24.048737Z node 2 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 
DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2026-01-07T17:48:24.048834Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-07T17:48:24.048879Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-07T17:48:24.048895Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-07T17:48:24.048910Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:48:24.048941Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-07T17:48:24.048978Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-07T17:48:24.049018Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2026-01-07T17:48:24.049036Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:48:24.049063Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:24.049089Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:24.049116Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-07T17:48:24.049131Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:24.049145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2026-01-07T17:48:24.049186Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:48:24.049210Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-07T17:48:24.049238Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:24.050178Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2026-01-07T17:48:24.050399Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1118:2929], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:24.050420Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:24.050454Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# 
[2:1117:2928], serverId# [2:1118:2929], sessionId# [0:0:0] 2026-01-07T17:48:24.050501Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [2:1116:2927], Recipient [2:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-07T17:48:24.052155Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1121:2932], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:24.052193Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:24.052221Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1120:2931], serverId# [2:1121:2932], sessionId# [0:0:0] 2026-01-07T17:48:24.052338Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1119:2930], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-07T17:48:24.052412Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:48:24.052465Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-07T17:48:24.052495Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-07T17:48:24.052534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-07T17:48:24.052592Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:24.052620Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:48:24.052644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:24.052669Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:24.052696Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:48:24.052718Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:24.052735Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:24.052750Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:48:24.052763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:48:24.052810Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 
Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-07T17:48:24.053005Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1119:2930], 1000} after executionsCount# 1 2026-01-07T17:48:24.053052Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1119:2930], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:24.053102Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1119:2930], 1000} finished in read 2026-01-07T17:48:24.053140Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:24.053155Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:48:24.053169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:24.053185Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:24.053209Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:24.053222Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:24.053236Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:48:24.053263Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:48:24.053332Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DqUnboxedValueToNativeArrowConversion::Tuple [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalListOfOptional [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2026-01-07T17:48:18.970621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:18.970662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:18.970965Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:18.979235Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:18.979493Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:48:18.979685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:18.989369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:19.030244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:19.030669Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:19.032137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:48:19.032205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:48:19.032275Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:48:19.032630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:19.032817Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:19.032879Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:48:19.097084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:19.121349Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:19.121498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:19.121568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:48:19.121603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:19.121628Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:19.121662Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:19.121770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:19.121820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:19.122016Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:19.122090Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:19.122173Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:19.122216Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:19.122250Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:19.122276Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:19.122307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:19.122330Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:19.122356Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:19.122425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:19.122451Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:19.122499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:48:19.127556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:19.127616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:19.127689Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:19.127796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:19.127847Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:19.127886Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:19.127923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:19.127946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:19.127970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:19.127994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:19.128187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:19.128215Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:19.128241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:19.128262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 
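The FinishPropose records in the trace above carry the end-to-end result for each proposed transaction ("Propose transaction complete txid ... exec latency: N ms, propose latency: M ms, status: ..."), which is usually the first thing worth comparing between runs. A rough sketch under the same assumption that the stderr is available as ya_test_stderr.log (hypothetical name); it only matches the latency line format shown here.

import re

PROPOSE_RE = re.compile(
    r"Propose transaction complete txid (\d+) at tablet (\d+).*?"
    r"exec latency: (\d+) ms, propose latency: (\d+) ms, status: (\w+)")

def propose_results(text):
    # Yields (txid, tablet, exec_ms, propose_ms, status) per FinishPropose record.
    for txid, tablet, exec_ms, prop_ms, status in PROPOSE_RE.findall(text):
        yield int(txid), int(tablet), int(exec_ms), int(prop_ms), status

if __name__ == "__main__":
    with open("ya_test_stderr.log") as f:   # hypothetical path
        for txid, tablet, exec_ms, prop_ms, status in propose_results(f.read()):
            print(f"tx {txid} @ {tablet}: {status}, exec {exec_ms} ms, propose {prop_ms} ms")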
2026-01-07T17:48:19.128308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:19.128329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:19.128349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:48:19.128369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:19.128388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:19.140026Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:19.140076Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:19.140098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:19.140141Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:19.140182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:19.140519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:19.140559Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:19.140600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:48:19.140680Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:19.140699Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:19.140803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:19.140829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:19.140851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:19.140877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:19.143223Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:19.143276Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:19.143417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], 
Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:19.143440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:19.143478Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:19.143509Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:19.143529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:19.143557Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:19.143583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 48:24.283666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.283832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is DelayComplete 2026-01-07T17:48:24.283865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2026-01-07T17:48:24.283889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2026-01-07T17:48:24.283924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2026-01-07T17:48:24.283957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is Executed 2026-01-07T17:48:24.283980Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2026-01-07T17:48:24.284019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:154] at 9437184 has finished 2026-01-07T17:48:24.284068Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:24.284099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:24.284129Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:24.284155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:24.302746Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.302790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.302836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-07T17:48:24.302889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-07T17:48:24.302922Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303048Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.303081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.303117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:24.303150Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303211Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.303227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.303257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-07T17:48:24.303282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-07T17:48:24.303297Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303358Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.303389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.303424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-07T17:48:24.303449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:48:24.303464Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303525Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.303539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.303557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T17:48:24.303577Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303640Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:24.303656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-07T17:48:24.303677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose 
latency: 2 ms 2026-01-07T17:48:24.303711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:48:24.303733Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:24.303850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-07T17:48:24.303875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.303898Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-07T17:48:24.303963Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:24.303979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-07T17:48:24.304009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-07T17:48:24.304043Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:24.304062Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:24.304124Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-07T17:48:24.304144Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-07T17:48:24.304175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-07T17:48:24.304198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-07T17:48:24.304224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:24.304237Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-07T17:48:24.304400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-07T17:48:24.304420Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.304438Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-07T17:48:24.304492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-07T17:48:24.304522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.304540Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-07T17:48:24.304592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-07T17:48:24.304606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.304621Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-07T17:48:24.304668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-07T17:48:24.304693Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.304717Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-07T17:48:24.304770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:351:2318]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-07T17:48:24.304785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:24.304798Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2026-01-07T17:47:46.525176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677830421963606:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.527866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:46.861027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:46.933253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
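The tail of the ForceOnlineBetweenOnline_oo8 trace is dominated by delayed ReadSet acknowledgements ("Send delayed Ack RS Ack ..." / "Receive RS Ack ..."). When an out-of-order test stalls, the usual first question is whether every sent ack was eventually received; the sketch below cross-checks them, keyed on (source tablet, destination tablet, txid) as those fields appear in the records above. It is illustrative only and relies on the same assumed ya_test_stderr.log file.

import re

SEND_RE = re.compile(
    r"Send delayed Ack RS Ack at \d+ \{TEvReadSet step# \d+ txid# (\d+) "
    r"TabletSource# (\d+) TabletDest# (\d+)")
RECV_RE = re.compile(
    r"Receive RS Ack at \d+ source (\d+) dest (\d+) consumer \d+ txId (\d+)")

def unmatched_acks(text):
    # Sets of (source, dest, txid) seen on the send side and on the receive side.
    sent = {(src, dst, tx) for tx, src, dst in SEND_RE.findall(text)}
    received = {(src, dst, tx) for src, dst, tx in RECV_RE.findall(text)}
    return sent - received, received - sent

if __name__ == "__main__":
    with open("ya_test_stderr.log") as f:   # hypothetical path
        sent_only, recv_only = unmatched_acks(f.read())
    print("sent but never acknowledged:", sorted(sent_only))
    print("acknowledged but never seen as sent:", sorted(recv_only))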
2026-01-07T17:47:46.933388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:46.946111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:46.947455Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677830421963503:2081] 1767808066519107 != 1767808066519110 2026-01-07T17:47:46.950217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.055459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.203178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.213947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:47.226383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:47.317275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.438221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.478023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.533722Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:49.938675Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677844506062599:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:49.938729Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:49.960577Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.044640Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:50.073558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.073654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.084596Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:50.139119Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.276311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:50.292889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.360455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.413172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.556321Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:53.556724Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.650289Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:53.650375Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:53.662881Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677860005811307:2081] 1767808073511692 != 1767808073511695 2026-01-07T17:47:53.681846Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:53.695465Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:53.827214Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.882906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:53.897251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.960125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.020384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.256294Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677877115220962:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.256353Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.281407Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.359408Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.365014Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677877115220921:2081] 1767808077254447 != 1767808077254450 2026-01-07T17:47:57.376671Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.376780Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:57.396936Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.545007Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-07T17:47:57.669647Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:57.677366Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompl ... -01-07T17:48:08.669452Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:08.689897Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:08.689994Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:08.705805Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:08.782931Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.918244Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:08.934345Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.994690Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.092323Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.368691Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677944951970123:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:12.368751Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:12.381073Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.456061Z node 8 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.471608Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677944951970095:2081] 1767808092367875 != 1767808092367878 2026-01-07T17:48:12.491999Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:12.492092Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:12.495791Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:12.569667Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.681110Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.701373Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.752292Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.805002Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.508439Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677959182765062:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:16.508493Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:16.523879Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.597682Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:16.601043Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677959182765035:2081] 1767808096507537 != 1767808096507540 2026-01-07T17:48:16.643645Z node 9 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:16.643742Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:16.670614Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:16.790089Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.878676Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:16.902570Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.969403Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.069934Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.481063Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677976849609542:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:20.481104Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:20.492653Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.573682Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:20.590218Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677976849609515:2081] 1767808100480577 != 1767808100480580 2026-01-07T17:48:20.608503Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:20.608581Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:20.611221Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:20.688285Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.838396Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:20.856612Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.918666Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.023849Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
|93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD]
Test command err: 2026-01-07T17:47:45.187664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677828633001643:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:45.192680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:45.494118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.498992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:45.499103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:45.501189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:45.540232Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:45.541539Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677828633001607:2081] 1767808065182855 != 1767808065182858 2026-01-07T17:47:45.733856Z node 1 :FLAT_TX_SCHEMESHARD WARN:
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:45.772862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:45.799350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:45.972790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.032320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:46.192723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:48.586700Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677837799118835:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:48.588134Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:48.610701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:48.672263Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:48.672276Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677837799118729:2081] 1767808068578312 != 1767808068578315 2026-01-07T17:47:48.684652Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:48.684751Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:48.699594Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:48.805866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-07T17:47:48.918001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:48.934712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.996489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:49.040157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:51.994051Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677852626123824:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:51.994733Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:52.004470Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:52.097508Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:52.129976Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:52.130055Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:52.139424Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:52.196263Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:52.313416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:52.332068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.396076Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:52.454254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.730035Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677870428511996:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:55.734724Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:55.751175Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:55.837668Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.875170Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.875258Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.886989Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.957383Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.096948Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.102366Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:56.118594Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called a ... 
tion cookie mismatch for subscription [7:7592677919424559696:2081] 1767808087297490 != 1767808087297493 2026-01-07T17:48:07.428000Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:07.428095Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:07.435150Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:07.476473Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:07.609920Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:07.625870Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.684911Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:07.733912Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:11.630306Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677937239369232:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:11.630370Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:11.648322Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:11.733709Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:11.735349Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677937239369204:2081] 1767808091629248 != 1767808091629251 2026-01-07T17:48:11.779415Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:11.779517Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:11.781494Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:11.850501Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.018940Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.035271Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.091346Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.143216Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:15.974805Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677954986186853:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:15.974862Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:15.995639Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.079739Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:16.092042Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677954986186826:2081] 1767808095974051 != 1767808095974054 2026-01-07T17:48:16.098512Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:16.098616Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:16.118628Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:16.192136Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.398004Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:16.419363Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.476763Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.560390Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.530891Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677975984550011:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:20.530963Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:20.542825Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.623989Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677975984549983:2081] 1767808100530087 != 1767808100530090 2026-01-07T17:48:20.636954Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:20.645136Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:20.645237Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:20.669087Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:20.835534Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.876023Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:20.898685Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.955753Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.007489Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
>> TPartitionTests::TestTxBatchInFederation
|93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD]
|93.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD]
Test command err: 2026-01-07T17:47:46.817330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677829429126435:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:46.817394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:46.844040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:47.105144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.112659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:47.112762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:47.159174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.224427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:47.311320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path:
dc-1/.metadata/script_executions 2026-01-07T17:47:47.431662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.437183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:47.447573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:47.505107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.687738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.737005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:47.810526Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:50.356527Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677849799065610:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:50.357138Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:50.367208Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:50.450661Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:50.452401Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677849799065508:2081] 1767808070349385 != 1767808070349388 2026-01-07T17:47:50.486396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.486477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.492598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.496697Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:50.665140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:50.681852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:50.686908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.786322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.844204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.846455Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677861053909751:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:53.846524Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:53.875536Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.951743Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:53.959227Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677861053909724:2081] 1767808073845411 != 1767808073845414 2026-01-07T17:47:53.976592Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:53.976686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:53.981900Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:54.136275Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.178998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:54.203396Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.299648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.356200Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.165955Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677877838485785:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.166159Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.193218Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:57.287243Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677877838485652:2081] 1767808077161829 != 1767808077161832 2026-01-07T17:47:57.306866Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.322465Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:57.322556Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:57.324788Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:57.469885Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScrip ... 
97 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:08.434323Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:08.443516Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:08.547554Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:08.660117Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:08.683051Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.737556Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:08.788032Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.155758Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677944523167838:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:12.155811Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:12.175295Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.248605Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.250661Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677944523167809:2081] 1767808092154913 != 1767808092154916 2026-01-07T17:48:12.288659Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:12.288751Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:12.294887Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-07T17:48:12.431125Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.542908Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:12.561067Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.614612Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.663826Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.204104Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677960709860058:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:16.204232Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:16.221540Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.309347Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:16.311081Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677960709860019:2081] 1767808096200894 != 1767808096200897 2026-01-07T17:48:16.349378Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:16.349481Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:16.360826Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:16.400571Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:16.596766Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:16.601933Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:16.614104Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.676079Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.725545Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.627012Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677976547691873:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:20.627068Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:20.642780Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.713639Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:20.714726Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677976547691844:2081] 1767808100625903 != 1767808100625906 2026-01-07T17:48:20.748580Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:20.748651Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:20.750442Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:20.913387Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:21.003130Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:21.025325Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.085348Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.144899Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TSchemeShardMoveTest::Reject >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::DropTablet >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TPQTabletTests::Cancel_Tx >> TPartitionTests::ConflictingCommitFails [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2026-01-07T17:47:47.365956Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677837480476043:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:47.366043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:47.416819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:47.657378Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.683733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:47.683870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:47.733701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:47.749155Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677837480476009:2081] 1767808067364207 != 1767808067364210 2026-01-07T17:47:47.749172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:47.934631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:47.979971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:47.986873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:47.994747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:48.078311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.211366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.254875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:48.372018Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:50.480660Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677846609383567:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:50.480722Z node 2 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:50.524065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.636278Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:50.637707Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677846609383540:2081] 1767808070479604 != 1767808070479607 2026-01-07T17:47:50.647460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:50.647527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:50.677227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:50.813064Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:50.864308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:50.873153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:50.883514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:50.891185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:50.951881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:51.001075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.162441Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677865193816601:2077];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:47:54.162507Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:54.191111Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.265317Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:54.267095Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677865193816555:2081] 1767808074161229 != 1767808074161232 2026-01-07T17:47:54.308812Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:54.308899Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:54.312979Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:54.377075Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:54.508860Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:54.514904Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:54.528689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.601443Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.659727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:57.536462Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677876661468272:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:57.536510Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:57.548613Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:57.664739Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:57.666476Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677876661468251:2081] 1767808077536002 != 176 ... qp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.323139Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:09.360834Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:09.361049Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:09.372821Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:09.536198Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:09.578366Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:09.600361Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.652388Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:09.703298Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:12.907637Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677944751382492:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:12.907693Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:12.936448Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:12.994508Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.996363Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677944751382463:2081] 1767808092906701 != 1767808092906704 2026-01-07T17:48:13.036140Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:13.036257Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:13.041131Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:13.219430Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:13.222927Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:13.240196Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:13.292554Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:13.339633Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:16.975594Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677961800168724:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:16.975647Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:16.991409Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-07T17:48:17.068709Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:17.071052Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7592677961800168697:2081] 1767808096974250 != 1767808096974253 2026-01-07T17:48:17.086520Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:17.086619Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:17.116136Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:17.269422Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:17.320009Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:17.342601Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.400953Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.450300Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.123962Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677982457487031:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:21.124023Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:21.138287Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:21.230326Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:21.248243Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:21.248383Z node 10 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:21.272878Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:21.328213Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:21.509347Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:21.528746Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.587208Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.639748Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:48:17.503181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:17.615035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:17.627661Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:17.628002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:17.628044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:17.872776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:17.872877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:17.926069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808095055188 != 1767808095055192 2026-01-07T17:48:17.937369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:17.980141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.065581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:18.338278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:18.350921Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:18.453029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.486514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:18.487484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:18.487776Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:48:18.488020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:18.529763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:18.530488Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:18.530654Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:18.532609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:48:18.532800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:48:18.532853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:48:18.533229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:18.533373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:18.533457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:48:18.544091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:18.562864Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:48:18.563059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:18.563183Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:48:18.563224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:48:18.563264Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:48:18.563315Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:18.563519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:18.563591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:18.563886Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:48:18.563999Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:48:18.564139Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:48:18.564178Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:18.564225Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:48:18.564259Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:48:18.564305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:48:18.564344Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:48:18.564386Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:48:18.564490Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:18.564535Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:18.564583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:48:18.565070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:48:18.565134Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:18.565259Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:48:18.565468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:48:18.565527Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:48:18.565618Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:48:18.565666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:48:18.565703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:48:18.565750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:48:18.565788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:48:18.566051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:18.566088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:48:18.566137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:48:18.566170Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:48:18.566225Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:48:18.566263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:48:18.566301Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:48:18.566331Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:48:18.566352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:18.567989Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:48:18.568038Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:48:18.578816Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... bfnwwg41dgkhjs0y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-01-07T17:48:25.951214Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:48:25.952710Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:48:25.952960Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1093:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1090:2904];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:48:25.953029Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1093:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:48:25.953329Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1086:2877] TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1090:2904] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 452076 Tasks { TaskId: 1 CpuTimeUs: 448896 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 14 BuildCpuTimeUs: 448882 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 2 CreateTimeMs: 1767808104932 CurrentWaitInputTimeUs: 126667 UpdateTimeMs: 1767808105951 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:48:25.953388Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1090:2904] 2026-01-07T17:48:25.954238Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [2:1086:2877] TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-07T17:48:25.954316Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1086:2877] TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1091:2905] TaskId# 2 trace_id# 2026-01-07T17:48:25.954380Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1086:2877] TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1092:2906] TaskId# 3 trace_id# 2026-01-07T17:48:25.954448Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1086:2877] TxId: 281474976715662. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:48:25.954574Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1091:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kecs5xx4bfnwwg41dgkhjs0y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-07T17:48:25.954646Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1091:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kecs5xx4bfnwwg41dgkhjs0y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1086:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-07T17:48:25.954787Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:48:25.955908Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:48:25.956007Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1092:2906], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kecs5xx4bfnwwg41dgkhjs0y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-01-07T17:48:25.956046Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1092:2906], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kecs5xx4bfnwwg41dgkhjs0y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1086:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-07T17:48:25.956120Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-07T17:48:25.958014Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:48:25.958304Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, ActorId: [2:1058:2877], ActorState: ExecuteState, LegacyTraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-07T17:48:25.958826Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T17:48:25.958869Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-07T17:48:25.959019Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-07T17:48:25.959099Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-07T17:48:25.959136Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-07T17:48:25.959229Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1126:2938] SnapshotReq marker# P0 2026-01-07T17:48:25.959573Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1128:2938] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-07T17:48:25.959758Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-07T17:48:25.959856Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-07T17:48:25.959906Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1125:2877] TxId: 281474976715664. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:48:25.959975Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:1125:2877] TxId: 281474976715664. Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:48:25.960041Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1125:2877] TxId: 281474976715664. 
Ctx: { TraceId: 01kecs5xx4bfnwwg41dgkhjs0y, Database: , SessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:48:25.960105Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1128:2938] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-07T17:48:25.960344Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1058:2877] 2026-01-07T17:48:25.962000Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1126:2938] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-07T17:48:25.962285Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1126:2938], Recipient [2:887:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-07T17:48:25.963056Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=NDMyOTQ3NWYtY2M3Nzk2YTUtZjAxYTI0MWYtNDY0M2IxODQ=, workerId: [2:1058:2877], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 280 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease [GOOD] >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations >> TKesusTest::TestAllocatesResources [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2026-01-07T17:45:46.649576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:45:46.649634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:45:46.650119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:45:46.661597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:45:46.661936Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:45:46.662211Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:45:46.679099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:45:46.731425Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:45:46.732036Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:45:46.733915Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:45:46.734030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:45:46.734112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:45:46.734423Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:45:46.734595Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:45:46.734655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:45:46.814708Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:45:46.891501Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:45:46.891682Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:45:46.891781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:45:46.891828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:45:46.891863Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:45:46.891918Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.892078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.892132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.892399Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:45:46.892488Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:45:46.892615Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:45:46.892656Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:45:46.892697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:45:46.892731Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:45:46.892779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:45:46.892810Z 
node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:45:46.892846Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:45:46.893035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.893083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.893133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:45:46.901358Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:45:46.901438Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:45:46.901549Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:45:46.901741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:45:46.901815Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:45:46.901888Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:45:46.901932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:45:46.901967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:45:46.902019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:45:46.902052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.902319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:45:46.902358Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:45:46.902404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:45:46.902434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.902487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:45:46.902513Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:45:46.902544Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:45:46.902573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.902595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:45:46.919644Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:45:46.919714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:45:46.919753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:45:46.919808Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:45:46.919895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:45:46.920391Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.920442Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:45:46.920492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:45:46.920638Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:45:46.920713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:45:46.920897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:45:46.920973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:45:46.921006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:45:46.921045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:45:46.924548Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:45:46.924609Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:45:46.924799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.924837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:45:46.924891Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
9437184 2026-01-07T17:45:46.924943Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:45:46.924979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:45:46.925017Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:45:46.925054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... execution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:48:19.883161Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:19.883193Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit FinishPropose 2026-01-07T17:48:19.883235Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 8 ms, propose latency: 8 ms, status: COMPLETE 2026-01-07T17:48:19.883296Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is DelayComplete 2026-01-07T17:48:19.883317Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:19.883343Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit CompletedOperations 2026-01-07T17:48:19.883374Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2026-01-07T17:48:19.883414Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is Executed 2026-01-07T17:48:19.883435Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2026-01-07T17:48:19.883454Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 9437184 has finished 2026-01-07T17:48:19.887258Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:19.887316Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 9437184 on unit FinishPropose 2026-01-07T17:48:19.887368Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:22.420627Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-07T17:48:22.420685Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-07T17:48:22.420990Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:497:2471], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:22.421019Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:22.421051Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:496:2470], serverId# [3:497:2471], sessionId# [0:0:0] 2026-01-07T17:48:22.421216Z node 3 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2026-01-07T17:48:22.421243Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:22.421320Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:22.421941Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2026-01-07T17:48:22.437181Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-07T17:48:22.437263Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2026-01-07T17:48:22.437307Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:22.437338Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2026-01-07T17:48:22.437388Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-07T17:48:22.437438Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2026-01-07T17:48:22.437470Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-07T17:48:22.437488Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:22.437526Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BlockFailPoint 2026-01-07T17:48:22.437547Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BlockFailPoint 2026-01-07T17:48:22.437569Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-07T17:48:22.437592Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BlockFailPoint 2026-01-07T17:48:22.437609Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit 
ExecuteDataTx 2026-01-07T17:48:22.437636Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-07T17:48:22.453925Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2026-01-07T17:48:22.454242Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-07T17:48:22.454288Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-07T17:48:22.472789Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:22.472849Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-07T17:48:22.473500Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-07T17:48:22.595064Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:195: Tablet 9437184 is not ready for [0:11] execution 2026-01-07T17:48:22.596707Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-07T17:48:22.596771Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-07T17:48:22.804125Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:22.804203Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-07T17:48:22.805019Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-07T17:48:22.931762Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2026-01-07T17:48:22.933653Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-07T17:48:22.933722Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-07T17:48:22.940785Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:22.940841Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-07T17:48:22.941585Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-07T17:48:23.788570Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2026-01-07T17:48:23.788676Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-07T17:48:23.788742Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-07T17:48:23.788776Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2026-01-07T17:48:23.788809Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:23.788839Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit FinishPropose 2026-01-07T17:48:23.788888Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 6 ms, propose latency: 6 ms, status: COMPLETE 2026-01-07T17:48:23.788976Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is DelayComplete 2026-01-07T17:48:23.789001Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:23.789026Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit CompletedOperations 2026-01-07T17:48:23.789050Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2026-01-07T17:48:23.789090Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-07T17:48:23.789107Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2026-01-07T17:48:23.789128Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 9437184 has finished 2026-01-07T17:48:23.793351Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:23.793410Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:11] at 9437184 on unit FinishPropose 2026-01-07T17:48:23.793469Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2026-01-07T17:48:25.162169Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:25.162287Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:25.180262Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:25.180727Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:25.215706Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:25.530464Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:25.530541Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:25.543134Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:25.543379Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:25.557383Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:25.929555Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:25.929654Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:25.947277Z 
node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:25.947386Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:25.981568Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:26.316131Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:26.316205Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:26.327641Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:26.328032Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:26.372656Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:26.376067Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 5 2026-01-07T17:48:26.376629Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:193:2162]) 2026-01-07T17:48:26.894714Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:26.894823Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:26.909553Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:26.910000Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... waiting for register request 2026-01-07T17:48:26.955727Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 11161996130146999973 ... waiting for register request (done) ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2026-01-07T17:48:26.956510Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7 2026-01-07T17:48:26.957062Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:193:2162]) >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2026-01-07T17:48:26.231714Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:26.231804Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:26.243289Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:26.243628Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:26.277696Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:26.278128Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=7596607819306802050, session=0, seqNo=0) 2026-01-07T17:48:26.278257Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:26.290152Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=7596607819306802050, session=1) 2026-01-07T17:48:26.290446Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=15455003541595016690, session=0, seqNo=0) 2026-01-07T17:48:26.290548Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:26.302299Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=15455003541595016690, session=2) 2026-01-07T17:48:26.302595Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-07T17:48:26.302777Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:48:26.302864Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:26.314685Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-07T17:48:26.314983Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:26.315233Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-07T17:48:26.315327Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 
"Lock1" queue: next order #2 session 2 2026-01-07T17:48:26.327200Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-07T17:48:26.327285Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=333) 2026-01-07T17:48:26.327845Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=9846094672360019666, name="Lock1") 2026-01-07T17:48:26.327966Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=9846094672360019666) 2026-01-07T17:48:26.585871Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:26.585952Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:26.597958Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:26.598165Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:26.611267Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:26.611681Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=255601621537971400, session=0, seqNo=0) 2026-01-07T17:48:26.611789Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:26.634017Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=255601621537971400, session=1) 2026-01-07T17:48:26.634291Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=13147300820715086220, session=0, seqNo=0) 2026-01-07T17:48:26.634392Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:26.646255Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=13147300820715086220, session=2) 2026-01-07T17:48:26.646579Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:26.646691Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:48:26.646770Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:26.658651Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=111) 2026-01-07T17:48:26.658961Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-07T17:48:26.659283Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:26.671238Z node 2 :KESUS_TABLET 
DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=222) 2026-01-07T17:48:26.671303Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=333) 2026-01-07T17:48:26.671700Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:153:2175], cookie=13479834150503062666, name="Lock1") 2026-01-07T17:48:26.671765Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:153:2175], cookie=13479834150503062666) 2026-01-07T17:48:26.672055Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=2763457900034369632, name="Lock1") 2026-01-07T17:48:26.672101Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=2763457900034369632) 2026-01-07T17:48:26.959729Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:26.959821Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:26.974565Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:26.974675Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:27.009138Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:27.009670Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=14975997617645851194, session=0, seqNo=0) 2026-01-07T17:48:27.009809Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:27.021431Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=14975997617645851194, session=1) 2026-01-07T17:48:27.021646Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=18238622299809954639, session=0, seqNo=0) 2026-01-07T17:48:27.021746Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:27.033274Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=18238622299809954639, session=2) 2026-01-07T17:48:27.033716Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:27.033825Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:48:27.033882Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:27.045607Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-07T17:48:27.045854Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=222, 
session=2, semaphore="Lock1" count=1) 2026-01-07T17:48:27.046102Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-07T17:48:27.046152Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-07T17:48:27.057733Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=222) 2026-01-07T17:48:27.057794Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=333) 2026-01-07T17:48:27.058208Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:156:2178], cookie=4998223063900664395, name="Lock1") 2026-01-07T17:48:27.058296Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:156:2178], cookie=4998223063900664395) 2026-01-07T17:48:27.058625Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:159:2181], cookie=16035094708274677768, name="Lock1") 2026-01-07T17:48:27.058686Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:159:2181], cookie=16035094708274677768) 2026-01-07T17:48:27.067590Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:27.067663Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:27.067988Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:27.068373Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:27.113749Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:27.113863Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:27.114120Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:198:2211], cookie=4539515009160924253, name="Lock1") 2026-01-07T17:48:27.114177Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:198:2211], cookie=4539515009160924253) 2026-01-07T17:48:27.114520Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2218], cookie=18224029952871033665, name="Lock1") 2026-01-07T17:48:27.114564Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2218], cookie=18224029952871033665) 2026-01-07T17:48:27.540847Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:27.540977Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:27.555744Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:27.556073Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:27.580039Z node 4 :KESUS_TABLET DEBUG: 
tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:27.580624Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=896744605464873680, session=0, seqNo=0) 2026-01-07T17:48:27.580782Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:27.592949Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=896744605464873680, session=1) 2026-01-07T17:48:27.593261Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=16880042862746101420, session=0, seqNo=0) 2026-01-07T17:48:27.593398Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:27.605601Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=16880042862746101420, session=2) 2026-01-07T17:48:27.605951Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:27.606102Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:48:27.606195Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:27.618416Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-07T17:48:27.618753Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-07T17:48:27.619104Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Lock1") 2026-01-07T17:48:27.619195Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-07T17:48:27.631417Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-07T17:48:27.631534Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-07T17:48:27.925564Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:27.925689Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:27.946448Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:27.946582Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:27.981657Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:27.988031Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=15350026428317076978, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-07T17:48:27.988266Z node 5 :KESUS_TABLET DEBUG: 
tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:28.000065Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=15350026428317076978) 2026-01-07T17:48:28.000527Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=12648268165928041356, path="/Root/Res", config={ }) 2026-01-07T17:48:28.000717Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-07T17:48:28.012398Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=12648268165928041356) 2026-01-07T17:48:28.013805Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 2137893190238783436. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:28.013866Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=2137893190238783436) 2026-01-07T17:48:28.014296Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 14765702521776167692. Data: { } 2026-01-07T17:48:28.014334Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=14765702521776167692) 2026-01-07T17:48:28.056291Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:28.108629Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:28.139647Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:28.181320Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:28.222999Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::PartitionKeyCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2026-01-07T17:48:19.470687Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.537468Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:19.537546Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:19.537611Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:19.537669Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:19.554309Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:19.554624Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:19.554882Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [1:183:2195] 2026-01-07T17:48:19.555764Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:19.555823Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2026-01-07T17:48:19.555868Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:183:2195] 2026-01-07T17:48:19.555939Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:19.556072Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:19.556126Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:19.556162Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:19.556231Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.556269Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:19.556307Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.556340Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-07T17:48:19.556438Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:19.556574Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|a5724d06-f811146d-615f524e-2dab3555_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-07T17:48:19.556650Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:19.556689Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:48:19.556745Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:19.556780Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:19.556820Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:48:19.556895Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:19.556958Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-07T17:48:19.557001Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-07T17:48:19.557059Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2026-01-07T17:48:19.557284Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:48:19.557419Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:48:19.557501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:19.557538Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:48:19.557583Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:19.557623Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:19.557666Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:48:19.557759Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2026-01-07T17:48:19.558674Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2026-01-07T17:48:19.558757Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:19.558795Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-07T17:48:19.558834Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-07T17:48:19.559356Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000? size 104 WTime 128 2026-01-07T17:48:19.559575Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:19.569932Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.600973Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.611532Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:19.611617Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:19.611713Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:19.611814Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2026-01-07T17:48:19.611915Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2026-01-07T17:48:19.612117Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:19.612160Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.612219Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:19.612273Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.612307Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-07T17:48:19.612359Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:48:19.622701Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.643532Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:48:19.643609Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.643651Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:19.643701Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:19.643746Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-07T17:48:19.654065Z nod ... 
3 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2026-01-07T17:48:26.013119Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 12 2026-01-07T17:48:26.013156Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:26.013187Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:26.013221Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx rollback for tx 8 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "[TxId: (empty maybe), Topic: \'Root/PQ/rt3.dc1--account--topic\', Partition 0, SourceId \'src2\', SeqNo 10] MinSeqNo violation failure. SeqNo 5" } Wait tx committed for tx 12 2026-01-07T17:48:26.013727Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:26.034297Z node 3 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:26.045093Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:26.045255Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-07T17:48:26.045320Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:26.045377Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:26.045415Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:26.045471Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:26.045507Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:26.045564Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:26.484179Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.516775Z node 4 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:26.516826Z node 4 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:26.516874Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:26.516938Z node 4 :PERSQUEUE INFO: 
pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:26.528107Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [4:184:2196] 2026-01-07T17:48:26.529447Z node 4 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:48:26.000000Z 2026-01-07T17:48:26.529632Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:184:2196] 2026-01-07T17:48:26.540094Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.570992Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.591771Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.622887Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.653919Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.674769Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.726512Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:26.798837Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.261425Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.291527Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:27.291578Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:27.291611Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:27.291649Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:27.303797Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [5:183:2196] 2026-01-07T17:48:27.305770Z node 5 :PERSQUEUE INFO: partition_init.cpp:1016: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-07T17:48:27.306003Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:183:2196] 2026-01-07T17:48:27.327105Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.368286Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.389033Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.399464Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.440549Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.481772Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:27.512733Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.034578Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.072787Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:28.072834Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:28.072870Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:28.072935Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:28.083713Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [6:184:2196] >>>> ADD BLOB 0 writeTimestamp=2026-01-07T17:48:28.075210Z >>>> ADD BLOB 1 writeTimestamp=2026-01-07T17:48:28.075232Z >>>> ADD BLOB 2 writeTimestamp=2026-01-07T17:48:28.075245Z >>>> ADD BLOB 3 writeTimestamp=2026-01-07T17:48:28.075256Z >>>> ADD BLOB 4 writeTimestamp=2026-01-07T17:48:28.075266Z >>>> ADD BLOB 5 writeTimestamp=2026-01-07T17:48:28.075277Z >>>> ADD BLOB 6 writeTimestamp=2026-01-07T17:48:28.075286Z >>>> ADD BLOB 7 writeTimestamp=2026-01-07T17:48:28.075295Z >>>> ADD BLOB 8 writeTimestamp=2026-01-07T17:48:28.075305Z >>>> ADD BLOB 9 writeTimestamp=2026-01-07T17:48:28.075329Z 2026-01-07T17:48:28.086407Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:28.000000Z 2026-01-07T17:48:28.086605Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [6:184:2196] 2026-01-07T17:48:28.097087Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.128117Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.148951Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.180064Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.211142Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.231974Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.283626Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.355759Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2026-01-07T17:48:13.882555Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677948031640022:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:13.882627Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:13.940062Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:13.944370Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677948141496785:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:13.944571Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:13.951318Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:14.051085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:14.051359Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:14.230986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:14.231093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:14.232628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:14.232686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:14.254806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:14.256737Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:48:14.258017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:14.280822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:14.310070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:14.320910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:14.574024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038cc/r3tmp/yandexUd6u5q.tmp 2026-01-07T17:48:14.574051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038cc/r3tmp/yandexUd6u5q.tmp 2026-01-07T17:48:14.577795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038cc/r3tmp/yandexUd6u5q.tmp 2026-01-07T17:48:14.577941Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:14.871777Z INFO: TTestServer started on Port 29676 GrpcPort 1131 2026-01-07T17:48:14.888349Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:14.948316Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:1131 2026-01-07T17:48:15.128051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:15.253053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.094752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677965321366259:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.095301Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.095598Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677965321366268:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.095631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.098476Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677965321366273:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.098775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677965211510553:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.098836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677965211510541:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.098944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.099327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677965211510557:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.099388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:17.107334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:17.129897Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677965211510556:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:48:17.153403Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677965321366276:2176] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:48:17.154466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-07T17:48:17.154772Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677965321366275:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T17:48:17.226394Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677965321366302:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:17.234075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677965211510654:3087] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:17.687924Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677965321366316:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:17.687927Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677965211510664:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:17.688872Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YjdlN2RkNWItOWFhYWZjOTgtODA4MDNiMjAtZWMxYTUzZTQ=, ActorId: [2:7592677 ... WAIT_RS 2026-01-07T17:48:27.824984Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-07T17:48:27.825031Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-07T17:48:27.825214Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-07T17:48:27.825252Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/2 2026-01-07T17:48:27.825322Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-07T17:48:27.825365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:27.825397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:27.825428Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:27.825475Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:48:27.825701Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2026-01-07T17:48:27.825734Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2026-01-07T17:48:27.825759Z node 6 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user drop done 2026-01-07T17:48:27.825791Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-07T17:48:27.825829Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:27.825859Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:27.825898Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:27.826267Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][1][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-07T17:48:27.826292Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:27.826314Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:27.826335Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process 
user action and tx pending commits 2026-01-07T17:48:27.826361Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:48:27.826411Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][1][StateIdle] Schedule reply tx done 67890 2026-01-07T17:48:27.826433Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:27.826453Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:48:27.826473Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:27.826591Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:27.826638Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:27.829696Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:27.829959Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:27.830597Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:27.830643Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:27.830673Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:27.830699Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:27.830730Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:27.830770Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:27.830811Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:27.831230Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-07T17:48:27.831280Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:48:27.831323Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:48:27.831373Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:48:27.831420Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/2 2026-01-07T17:48:27.831464Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-07T17:48:27.831682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:27.831791Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: 
[72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:27.831860Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:27.831883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:48:27.831904Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:27.831921Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:27.831942Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:27.831962Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:48:27.831987Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:48:27.832082Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 1 2026-01-07T17:48:27.832106Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:48:27.832129Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:48:27.832156Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:48:27.832186Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 2/2 2026-01-07T17:48:27.832235Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-07T17:48:27.832290Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-07T17:48:27.832512Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 } Consumers { Name: "client-3" Generation: 2 } 2026-01-07T17:48:27.832598Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:27.832667Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-07T17:48:27.832730Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-07T17:48:27.832786Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 
22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:48:27.832834Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-07T17:48:27.832884Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-07T17:48:27.832953Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-07T17:48:27.832991Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:48:27.833029Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-07T17:48:27.833072Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-07T17:48:27.833116Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-07T17:48:27.833181Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-07T17:48:27.833264Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:48:27.835170Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:48:27.835233Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-07T17:48:27.835285Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-07T17:48:27.835332Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.5%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::OneTable [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2026-01-07T17:48:09.960135Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.105843Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:10.105909Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:10.105965Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.106022Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:10.134000Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:184:2196] 2026-01-07T17:48:10.138467Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:10.000000Z 2026-01-07T17:48:10.138814Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:184:2196] 2026-01-07T17:48:10.151028Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.182079Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.202965Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.234069Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.265214Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.286086Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.337693Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.410427Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\220\377\312\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:10.495419Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\220\377\312\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\220\377\312\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:10.911798Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates 
for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.954871Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:10.954941Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:10.954984Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.955047Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:10.971373Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-07T17:48:10.971593Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:10.971877Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:181:2193] 2026-01-07T17:48:10.972837Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request 2026-01-07T17:48:10.973059Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-07T17:48:10.973232Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:48:10.973335Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-07T17:48:10.973445Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:48:10.973527Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2026-01-07T17:48:10.973730Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-07T17:48:10.973793Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-07T17:48:10.973892Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-07T17:48:10.973985Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-07T17:48:10.974121Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-07T17:48:10.974158Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:48:10.974223Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:10.000000Z 2026-01-07T17:48:10.974268Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:48:10.974363Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-07T17:48:10.974520Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:48:10.974562Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:10.974601Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-07T17:48:10.974651Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:181:2193] 2026-01-07T17:48:10.974701Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2026-01-07T17:48:10.974755Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:10.974792Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:10.974828Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:10.974861Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:10.974893Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:10.974929Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:10.974968Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:10.975048Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:10.975237Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:10.985585Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.016761Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.027371Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:11.027471Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:11.027522Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:11.027570Z node 2 :PERSQUEUE DEBUG: partition ... artition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:28.425504Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:28.425557Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.425604Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:28.425645Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.425701Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:28.425766Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 1 2026-01-07T17:48:28.425807Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 1 2026-01-07T17:48:28.425850Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:28.425890Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:28.425934Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.426190Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 1 2026-01-07T17:48:28.427003Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:28.427104Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:28.427160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user 
action and tx events 2026-01-07T17:48:28.427196Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.427233Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.427275Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.427308Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.427354Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait for no tx committed 2026-01-07T17:48:28.437729Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:28.458611Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.458696Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.458738Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.458798Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.458840Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.479389Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.479447Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.479480Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.479518Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.479551Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.500070Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.500133Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.500165Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.500205Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.500235Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.520779Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.520853Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.520886Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.520921Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.520967Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.541514Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.541585Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.541617Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.541653Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.541685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.562261Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.562322Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.562354Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.562402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.562433Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.594394Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.594462Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.594494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.594529Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.594560Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.615086Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.615142Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.615167Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.615194Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.615218Z node 6 
:PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.635773Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.635849Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.635882Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.635919Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.635951Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.656439Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.656489Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.656513Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.656540Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.656565Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:28.677195Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:28.677261Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.677299Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:28.677351Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:28.677384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:26.635036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:26.635127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:26.635176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:26.635217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:26.635252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:26.635278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:26.635326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:26.635383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:26.636123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:26.638963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:26.714866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:26.714914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:26.730008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:26.730400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:26.731772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:26.740029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:26.740393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:26.743408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-07T17:48:26.746417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:26.759487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:26.761159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:26.769806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:26.769893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:26.772232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:26.772284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:26.772382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:26.773307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.778869Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:48:26.901832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:48:26.903150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.904031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:48:26.904070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:48:26.905504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:26.905575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:26.908139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:26.910407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:48:26.910691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.910843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:48:26.910890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:48:26.910923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:48:26.912742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.912798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:48:26.912832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:48:26.914486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.914538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:26.914578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:26.914617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:48:26.922453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:48:26.924079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:48:26.925258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:48:26.926086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:26.926181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:26.926210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:26.926429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:48:26.926474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:26.926608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:26.927641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-07T17:48:26.929281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:26.929329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ecute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:48:29.019156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-07T17:48:29.019212Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:48:29.019241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:48:29.019267Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-07T17:48:29.019924Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:48:29.019995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:48:29.020020Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:48:29.020045Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2026-01-07T17:48:29.020075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:29.020391Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:48:29.020434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-07T17:48:29.020453Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-07T17:48:29.020472Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2026-01-07T17:48:29.020491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-07T17:48:29.020532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-07T17:48:29.022146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-07T17:48:29.022185Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:48:29.022333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2026-01-07T17:48:29.022422Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:48:29.022454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:48:29.022488Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-07T17:48:29.022514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:48:29.022540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-07T17:48:29.022583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:340:2317] message: TxId: 108 2026-01-07T17:48:29.022611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-07T17:48:29.022636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-07T17:48:29.022661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-07T17:48:29.022718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2026-01-07T17:48:29.023648Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-07T17:48:29.023794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-07T17:48:29.024873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-07T17:48:29.024916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:828:2783] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-07T17:48:29.027554Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-07T17:48:29.027627Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2026-01-07T17:48:29.056148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 312 RawX2: 8589936889 } TabletId: 72075186233409546 State: 4 2026-01-07T17:48:29.056254Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2026-01-07T17:48:29.057933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:48:29.058058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:48:29.058436Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-07T17:48:29.060919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:29.061198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2026-01-07T17:48:29.061910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:48:29.061961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2026-01-07T17:48:29.062027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:29.064442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:1 2026-01-07T17:48:29.064520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-07T17:48:29.065120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2026-01-07T17:48:29.065866Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:29.066048Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 210us result status StatusSuccess 2026-01-07T17:48:29.066420Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2026-01-07T17:47:53.124251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677859671353409:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:53.124401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-07T17:47:53.381009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.412698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:53.412811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:53.469484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:53.486442Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:53.488720Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677859671353174:2081] 1767808073085834 != 1767808073085837 2026-01-07T17:47:53.605287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:53.708499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:53.726695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:47:53.755905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T17:47:53.806816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.939770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:53.997032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:54.122782Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:56.321397Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677876053342155:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:56.321437Z 
node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:56.343258Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.424725Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:56.428685Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592677876053342132:2081] 1767808076320824 != 1767808076320827 2026-01-07T17:47:56.473852Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:56.473941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:56.480913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:56.587497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:56.656990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:56.679060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.740622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:56.796048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:59.768956Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592677889173118489:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:59.769028Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:59.791891Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:59.866861Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:59.868423Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592677889173118450:2081] 1767808079768275 != 1767808079768278 2026-01-07T17:47:59.909221Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:59.909306Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:59.916286Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:59.991531Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:00.097331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:00.115154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.178009Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:00.226781Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:03.109206Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592677903055082059:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:03.111676Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:03.123463Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.192055Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:03.199614Z node 4 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592677903055082030:2081] 1767808083104455 != 1767808083104458 2026-01-07T17:48:03.224600Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:03.224663Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:03.227801Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:03.370602Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:03.416546Z node 4 :FLAT_TX_SCHEMESHARD WARN: sch ... -01-07T17:48:13.313942Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592677945399558836:2081] 1767808093233472 != 1767808093233475 2026-01-07T17:48:13.318957Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:13.350513Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:13.350607Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:13.354776Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:13.489623Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:13.523258Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:13.538936Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:13.583196Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:13.623431Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.049562Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7592677964592470251:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:17.049673Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:17.079053Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:17.140659Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7592677964592470225:2081] 1767808097048830 != 1767808097048833 2026-01-07T17:48:17.150419Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:17.190491Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:17.190589Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:17.196571Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:17.331133Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:17.402517Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:17.419461Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.480268Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:17.527656Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.884025Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7592677977985915843:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:20.884098Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:20.899094Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:20.992674Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:21.009671Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:21.009771Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:21.036261Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:21.166132Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:21.300816Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:21.317694Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.370206Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.424977Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:24.551208Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592677994646975633:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:24.551295Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:24.566757Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:24.648951Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:24.651793Z node 10 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7592677994646975604:2081] 1767808104550319 != 1767808104550322 2026-01-07T17:48:24.686422Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:24.686529Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:24.696670Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:24.804999Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:24.943901Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:24.964656Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:25.021318Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:25.075104Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DstCreator::ReplicationModeMismatch >> DstCreator::SameOwner >> DstCreator::ColumnsSizeMismatch >> DstCreator::WithIntermediateDir >> DstCreator::GlobalConsistency >> DstCreator::WithSyncIndexAndIntermediateDir >> DstCreator::WithSyncIndex >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TPartitionTests::DifferentWriteTxBatchingOptions >> TPQTest::TestCompaction [GOOD] >> TPQTest::TestDirectReadHappyWay >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> 
TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TPartitionTests::TestBatchingWithChangeConfig >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TSchemeShardLoginFinalize::NoPublicKeys >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestMessageNo >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> TSchemeShardLoginTest::UserLogin >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::WithSyncIndex [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> 
TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TSchemeShardLoginFinalize::NoPublicKeys [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestGetTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 
127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart |93.5%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2026-01-07T17:48:25.223918Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:25.224070Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:25.242120Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:25.242615Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:25.278788Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:25.286550Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=17188178088364299389, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:48:25.287643Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:25.299817Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=17188178088364299389) 2026-01-07T17:48:25.300473Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=16221399092939443139, path="/Root/Res", config={ }) 2026-01-07T17:48:25.300685Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-07T17:48:25.312891Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=16221399092939443139) 2026-01-07T17:48:25.317998Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:152:2174]. Cookie: 841151257838567401. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:25.318096Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:152:2174], cookie=841151257838567401) 2026-01-07T17:48:25.318620Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:152:2174]. Cookie: 10463748422689680964. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2026-01-07T17:48:25.318672Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:152:2174], cookie=10463748422689680964) 2026-01-07T17:48:27.391790Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:27.391868Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:27.402928Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:27.403153Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:27.416273Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:27.416589Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:137:2161], cookie=3344661243292073562, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:48:27.416798Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:27.438925Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:137:2161], cookie=3344661243292073562) 2026-01-07T17:48:27.439701Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 3194834140562053046. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:27.439755Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=3194834140562053046) 2026-01-07T17:48:27.440165Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 1887346309461798305. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:27.440203Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=1887346309461798305) 2026-01-07T17:48:27.440538Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 10568932296367056614. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-07T17:48:27.440566Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=10568932296367056614) 2026-01-07T17:48:27.440823Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 12538576251600688908. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-07T17:48:27.440846Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=12538576251600688908) 2026-01-07T17:48:29.571612Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:29.571713Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:29.586583Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:29.586675Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:29.620546Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:29.620944Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=16405598130853159606, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:48:29.621210Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:29.632952Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=16405598130853159606) 2026-01-07T17:48:29.633440Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=2582597905540545360, path="/Root/Res1", config={ }) 2026-01-07T17:48:29.633617Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2026-01-07T17:48:29.645372Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=2582597905540545360) 2026-01-07T17:48:29.645778Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:152:2174], cookie=7223846687078196753, path="/Root/Res2", config={ }) 2026-01-07T17:48:29.645938Z 
node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2026-01-07T17:48:29.657703Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:152:2174], cookie=7223846687078196753) 2026-01-07T17:48:29.658260Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 9312742386784482322. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:29.658299Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=9312742386784482322) 2026-01-07T17:48:29.658704Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 5606401974863141664. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:29.658735Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=5606401974863141664) 2026-01-07T17:48:29.659899Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:157:2179]. Cookie: 684733015322195257. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2026-01-07T17:48:29.659947Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:157:2179], cookie=684733015322195257) 2026-01-07T17:48:31.777755Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:31.777874Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:31.797549Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:31.797873Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:31.822042Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:31.822551Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=5273706996121262875, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-07T17:48:31.822896Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:31.835009Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=5273706996121262875) 2026-01-07T17:48:31.837877Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 16788619562893269779. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:31.837966Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=16788619562893269779) 2026-01-07T17:48:31.838492Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 3876293850328506294. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2026-01-07T17:48:31.838546Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=3876293850328506294) 2026-01-07T17:48:34.012128Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:34.012246Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:34.031882Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:34.032047Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:34.066742Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:34.067228Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=17654908660572700086, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-07T17:48:34.067416Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:34.079557Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=17654908660572700086) 2026-01-07T17:48:34.080207Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=15981579801790630429, path="/Root/Res", config={ }) 2026-01-07T17:48:34.080464Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-07T17:48:34.093227Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=15981579801790630429) 2026-01-07T17:48:34.094152Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 12151807606487841935. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:34.094221Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=12151807606487841935) 2026-01-07T17:48:34.094732Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:155:2177], cookie=7657609162093609965, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2026-01-07T17:48:34.094919Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2026-01-07T17:48:34.095109Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:34.107405Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:155:2177], cookie=7657609162093609965) 2026-01-07T17:48:34.107978Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 13455423756539940152. Data: { } 2026-01-07T17:48:34.108031Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=13455423756539940152) >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TSchemeShardLoginFinalize::InvalidPassword [GOOD] >> TSchemeShardLoginFinalize::Success >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLogin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2026-01-07T17:48:30.654933Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678020694070052:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.919829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.978096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.978167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.979841Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.020189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.199905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467614Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.669579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.981912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.991164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.290350Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.290371Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.290793Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.138387Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112280, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.138887Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.140901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:33.142654Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:48:33.142681Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:48:33.171853Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:48:33.195610Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808113211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { ... ndexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 43 PathOwnerId: 72057594046644480 } 2026-01-07T17:48:33.204446Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 43] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808113211 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> 
DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2026-01-07T17:48:30.654912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678018164551957:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.909266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.975621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.975706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.978289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.050135Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.076499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.669162Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.958419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.970871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:31.981431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.302844Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false 
SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.302870Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.303283Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.157466Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112273, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.157976Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.160219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:33.161971Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:48:33.162006Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:48:33.188535Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:48:33.222718Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808113232 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 
KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 43 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCo ... ImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 44 PathOwnerId: 72057594046644480 } 2026-01-07T17:48:33.232657Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 44] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808113232 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TWebLoginService::AuditLogCreateModifyUser >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::NodeStatusComputer [GOOD] >> TSentinelTests::InitialDeploymentGracePeriod >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::BanUnbanUser >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> LdapAuthProviderTest::LdapServerIsUnavailable >> TPartitionTests::TestBatchingWithProposeConfig >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2026-01-07T17:48:16.961406Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:48:16.961462Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:48:16.961522Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:16.961549Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 
2026-01-07T17:48:16.961586Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:48:16.961675Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:48:16.962818Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:48:16.967996Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 
1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 
1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... nfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-07T17:48:33.614908Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-07T17:48:33.615017Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-07T17:48:33.615095Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:48:33.615389Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 1:4, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:33.615431Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 3:14, status# INACTIVE, 
required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:33.615496Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 6:24, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:33.615564Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T17:48:33.626124Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 1 2026-01-07T17:48:33.626211Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T17:48:33.636708Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:33.636779Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:48:33.636879Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 2 2026-01-07T17:48:33.636910Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T17:48:33.637092Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-07T17:48:33.637131Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-07T17:48:33.637161Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-07T17:48:33.637188Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-07T17:48:33.637230Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-07T17:48:33.637276Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-07T17:48:33.637307Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-07T17:48:33.637335Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-07T17:48:33.637763Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 123 2026-01-07T17:48:33.637804Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-01-07T17:48:33.638161Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 
214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.638643Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.638898Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639105Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639265Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639434Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639576Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639723Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T17:48:33.639781Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.873110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.873191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.888108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.888475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.888646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.895274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.895671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.896435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.896689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.903027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.903989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-07T17:48:32.912990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:32.921191Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:48:33.039143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:48:33.040829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.042598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:48:33.042655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:48:33.043739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:33.043831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:33.046928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:33.048061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:48:33.048336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.048456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:48:33.048502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:48:33.048538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:48:33.050510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.050566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:48:33.050649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:48:33.052357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.052399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.052442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:33.052495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:48:33.056487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:48:33.058462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:48:33.059917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:48:33.060952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:33.061093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:33.061142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:33.061410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:48:33.061452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:48:33.061603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:33.062659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:33.065036Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:33.065096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... ration: MODIFY USER, path: /MyRoot 2026-01-07T17:48:35.550127Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:35.550169Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:35.550335Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:35.550380Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-07T17:48:35.550872Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:48:35.550987Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:48:35.551033Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:48:35.551075Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-07T17:48:35.551122Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:35.551206Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-07T17:48:35.552706Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2026-01-07T17:48:35.482208Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:48:35.505849Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-07T17:48:35.521303Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 
2026-01-07T17:48:35.530666Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-07T17:48:35.539212Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-07T17:48:35.547271Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2026-01-07T17:48:35.547271Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-07T17:48:35.555817Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:48:35.560789Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:35.560941Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:48:35.560986Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:35.561032Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:48:35.561074Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:35.561127Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:48:35.561188Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-07T17:48:35.561229Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:35.561265Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-07T17:48:35.561305Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-07T17:48:35.561347Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:48:35.563439Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: 
TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:35.563564Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-07T17:48:35.563721Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:35.563767Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:35.563926Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:35.563974Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-07T17:48:35.564465Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:48:35.564572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:48:35.564617Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:48:35.564661Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:48:35.564702Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:35.564790Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-07T17:48:35.566524Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2026-01-07T17:48:35.482208Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-07T17:48:35.505849Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-07T17:48:35.521303Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, 
login_user_change=[password] 2026-01-07T17:48:35.530666Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-07T17:48:35.539212Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-07T17:48:35.547271Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-07T17:48:35.560661Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2026-01-07T17:48:35.560661Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> DstCreator::SamePartitionCount [GOOD] >> DstCreator::ColumnTypeMismatch [GOOD] >> DstCreator::WithAsyncIndex [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> 
TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> PQCountersLabeled::PartitionKeyCompaction [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2026-01-07T17:48:30.654887Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678021861681353:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.923383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.978381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.978474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.997073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.019194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.153685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467678Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.668387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.993584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:32.002424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.131672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.162045Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.162065Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.162475Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.138902Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112098, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.139276Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.166627Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:48:33.178935Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808112196 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { 
MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:48:33.179223Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Columns size mismatch: expected: 2, got: 3 2026-01-07T17:48:33.559754Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678031888708486:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:33.560038Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:33.578495Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:33.630787Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678031888708376:2081] 1767808113557195 != 1767808113557198 2026-01-07T17:48:33.639100Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.670958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:33.671038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:33.674254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:33.801286Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:33.801309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:33.801315Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:33.801406Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:33.841721Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:34.002224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:34.010861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.044219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.076253Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:34.076282Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:34.076754Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:34.564224Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:36.200448Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808114079, tx_id: 281474976710658 } } } 2026-01-07T17:48:36.200702Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:36.202309Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:48:36.203442Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808114114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { 
Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:48:36.203713Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> TSchemeShardLoginFinalize::Success [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2026-01-07T17:48:30.654894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678021986256314:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.934502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.976393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.976519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.978185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.013372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.089922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467513Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.669695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:48:31.980621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.988846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.130602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.130635Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.131153Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.118961Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112098, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.119319Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.120718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:33.121334Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:48:33.121372Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:48:33.141442Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:48:33.141461Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-07T17:48:33.592203Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678032480534361:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:33.592314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:33.603560Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:33.672138Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.673090Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678032480534318:2081] 1767808113591320 != 1767808113591323 2026-01-07T17:48:33.700639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:33.700705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:33.702730Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:33.784472Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:33.866807Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:33.866825Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:33.866831Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:33.866916Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:34.095604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:34.101968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.331050Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:34.331074Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:34.331383Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:34.596809Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:36.211360Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808114331, tx_id: 281474976715658 } } } 2026-01-07T17:48:36.211657Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:36.213217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:36.213933Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-07T17:48:36.213953Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-07T17:48:36.239238Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-07T17:48:36.239267Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 41] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2026-01-07T17:48:30.654883Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678020922219839:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.927196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.994165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.994292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.997723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.044348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-07T17:48:31.171066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467656Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.667912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.956169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.970354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:48:31.977294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:48:31.981944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.159471Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.159497Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.160042Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.253032Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, 
size_bytes: 0, created_at: { plan_step: 1767808112098, tx_id: 281474976710659 } } } 2026-01-07T17:48:33.253467Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.254778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:33.255388Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2026-01-07T17:48:33.255414Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2026-01-07T17:48:33.278340Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2026-01-07T17:48:33.278372Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:48:33.788093Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678031437375751:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:33.788149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:33.816206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:33.897083Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678031437375716:2081] 1767808113787343 != 1767808113787346 2026-01-07T17:48:33.901709Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.909211Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:33.909282Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:33.913863Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:34.033187Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:34.068939Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:34.069004Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:34.069015Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:34.069087Z node 2 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:34.299766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:34.305747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.361231Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:34.361259Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:34.361702Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:34.794205Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:36.298126Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808114387, tx_id: 281474976710658 } } } 2026-01-07T17:48:36.299599Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:36.301348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:36.302093Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:48:36.302109Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:48:36.327318Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:48:36.327345Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 
72057594046644480, LocalPathId: 39] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2026-01-07T17:48:30.654761Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678020164428772:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.936910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.979092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.979167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:31.037384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.038630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.216180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.667853Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.971539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.980623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.147756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.180313Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.180339Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.180940Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.034631Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112098, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.037159Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.077245Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:48:33.079961Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808112217 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" 
MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:48:33.080243Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication mode mismatch: expected: REPLICATION_MODE_READ_ONLY, got: 0 2026-01-07T17:48:33.634448Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678033891720683:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:33.634483Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:33.647236Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:33.710978Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.712330Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678033891720658:2081] 1767808113634075 != 1767808113634078 2026-01-07T17:48:33.748987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:33.749065Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:33.750756Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:33.858665Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:33.922854Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:33.922881Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:33.922894Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:33.922979Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:34.126892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:34.132073Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.160281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.188742Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:34.188764Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:34.189193Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:34.641522Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:36.373596Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808114198, tx_id: 281474976710658 } } } 2026-01-07T17:48:36.373869Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:36.375247Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:48:36.376216Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808114226 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 
Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 
MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:48:36.376455Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.887237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.887317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.901979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.902269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.902419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.908709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.909065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.909807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.910033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.912557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.913515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.913570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.913649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.913681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.913714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.076667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:48:33.077881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.078030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.078085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388184Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2026-01-07T17:48:37.388221Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388240Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2026-01-07T17:48:37.388264Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388277Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2026-01-07T17:48:37.388297Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388310Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2026-01-07T17:48:37.388329Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388342Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2026-01-07T17:48:37.388361Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388384Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-07T17:48:37.388405Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388418Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-07T17:48:37.388444Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: 
EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:37.388478Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-07T17:48:37.388513Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.388535Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:48:37.388836Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:37.388917Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:48:37.388971Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:37.389006Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:48:37.389038Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:37.389083Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:37.389135Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-07T17:48:37.389179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:48:37.389211Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-07T17:48:37.389248Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-07T17:48:37.389283Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-01-07T17:48:37.391008Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:37.391091Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2026-01-07T17:48:37.391245Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:37.391280Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.391413Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.391451Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-07T17:48:37.391829Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:48:37.391952Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-07T17:48:37.392001Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-07T17:48:37.392057Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-07T17:48:37.392098Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:48:37.392186Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-07T17:48:37.393493Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-01-07T17:48:37.393920Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:37.394061Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 174us result status StatusSuccess 2026-01-07T17:48:37.394387Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:48:32.796453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.887044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.887108Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.901473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.901751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.901918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.907232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.907501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.908055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.908237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.910355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.910488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.912732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.038974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.039758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.039886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.039956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.040694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
17:48:37.379082Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-07T17:48:37.379123Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-07T17:48:37.379166Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-07T17:48:37.379209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:37.379301Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-07T17:48:37.382555Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-07T17:48:37.383034Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:37.383613Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:668:2658] Bootstrap 2026-01-07T17:48:37.384834Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:668:2658] Become StateWork (SchemeCache [5:673:2663]) 2026-01-07T17:48:37.385030Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:37.385203Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2026-01-07T17:48:37.385680Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:37.386239Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:668:2658] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-07T17:48:37.387926Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 2026-01-07T17:48:37.388725Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.388775Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:48:37.428009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: 
TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 2026-01-07T17:48:37.428092Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:37.428124Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.428278Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.428325Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:37.428703Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:48:37.428970Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:37.429116Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 189us result status StatusSuccess 2026-01-07T17:48:37.429485Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy3t+0u6kWYtU3JYsK4UE\nZpl+k6CaZn1XWDAZ1E69x6XZOwFMC+bdbdlB2iTWs5mn+zVFV8YvmonALbEMoOKw\ns8PXhBKEYEdi2GNJ81bEY8dpxGGK7qK1VCkvxfwALQXwzVwk7ibUsMAw0lzA95ss\nUgEz12GC9h6Oc92ChXlE8Q2L4q+9GfrnU+0hn34DVGjm1Vc7R3zgWa0w42jH/Sq0\nD/KLN6w7Oi2B/RyDiqssjCCb5YUT7bxhB4E8sf3kMna4QKWrttCFmyZRGJ8pg/qD\nDXhBmTZrUhPVmbrvSudJTlzhIjd6luf7Eo0mEEot7jwivWyfJUQIorqyls34foXr\n1wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894517426 } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2026-01-07T17:48:30.654898Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678021198833281:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:30.654949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:30.870862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:30.975467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:30.975550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:30.977142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:31.022972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:31.113950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:31.467454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:31.467481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:31.467493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:31.467550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:31.668110Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:31.973530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:31.985051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:32.129392Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:32.129415Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:32.129880Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:33.205572Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808112105, tx_id: 281474976710658 } } } 2026-01-07T17:48:33.206060Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:33.207735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:33.208450Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-07T17:48:33.208513Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-07T17:48:33.233014Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-07T17:48:33.233046Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-07T17:48:33.731956Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678032690575850:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:33.731998Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:33.737914Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:33.811973Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.821533Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678032690575818:2081] 1767808113731063 != 1767808113731066 2026-01-07T17:48:33.836173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:33.836279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:33.844859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:33.851250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:34.039022Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:34.039044Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:34.039057Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:34.039138Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:34.086167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:34.259997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:34.265971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.291222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:34.320021Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] 
RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-07T17:48:34.320042Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-07T17:48:34.320497Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-07T17:48:34.735514Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:36.503979Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767808114331, tx_id: 281474976710658 } } } 2026-01-07T17:48:36.504280Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-07T17:48:36.550401Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-07T17:48:36.554020Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767808114359 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 
UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-07T17:48:36.554300Z node 2 :REPLICATION_CONTROLLER ERROR: 
dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796431Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.890129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.890193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.904912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.905167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.905289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.910234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.910481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.910972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.911176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.913298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.913428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.914290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.914343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.914418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.914456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.914489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.914605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.042964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
LocalPathId: 2] 2026-01-07T17:48:38.167809Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-07T17:48:38.167829Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:38.167843Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-07T17:48:38.167864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-07T17:48:38.167879Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-07T17:48:38.167899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:38.167918Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:48:38.167938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 39] name: DirSub1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:48:38.167952Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:48:38.168260Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:38.168346Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:48:38.168382Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:48:38.168413Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:48:38.168440Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:48:38.168481Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:38.168528Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-07T17:48:38.168559Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:48:38.168593Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:48:38.168631Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 0 2026-01-07T17:48:38.168661Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-07T17:48:38.170529Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusSuccess TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:38.170609Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2026-01-07T17:48:38.170762Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:38.170795Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:38.170930Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:38.170965Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2026-01-07T17:48:38.171309Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:48:38.171381Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:48:38.171416Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:48:38.171449Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-07T17:48:38.171481Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:48:38.171563Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 0 2026-01-07T17:48:38.172968Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 TestModificationResult got TxId: 107, wait until txId: 107 2026-01-07T17:48:38.173457Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:38.173593Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 150us result status StatusSuccess 2026-01-07T17:48:38.173874Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:38.174433Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:38.174556Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 128us result status StatusSuccess 2026-01-07T17:48:38.174792Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:38.175288Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:38.175356Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |93.5%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 14995, msgbus: 18978 2026-01-07T17:44:50.356427Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677074365291745:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:50.357925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:50.550850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.602010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:50.602136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:50.615942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:50.641882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:50.745216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:44:50.745251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:44:50.745262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:44:50.745357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:50.758878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:44:50.905501Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592677074365291976:2118] Handle TEvNavigate describe path dc-1 2026-01-07T17:44:50.905589Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592677074365292799:2742] HANDLE EvNavigateScheme dc-1 2026-01-07T17:44:50.905984Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592677074365292799:2742] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:50.941808Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592677074365292799:2742] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-07T17:44:50.946123Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592677074365292799:2742] Handle TEvDescribeSchemeResult Forward to# [1:7592677074365292798:2741] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:44:50.947637Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074365291976:2118] Handle TEvProposeTransaction 2026-01-07T17:44:50.947675Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074365291976:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:44:50.947749Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074365291976:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592677074365292804:2747] 2026-01-07T17:44:51.053997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677074365292804:2747] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.054062Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677074365292804:2747] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.054084Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677074365292804:2747] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.054139Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677074365292804:2747] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.054445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677074365292804:2747] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.054538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677074365292804:2747] HANDLE 
EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-07T17:44:51.054620Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677074365292804:2747] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:44:51.054771Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677074365292804:2747] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:44:51.055391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:51.056823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677074365292804:2747] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:44:51.056880Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677074365292804:2747] txid# 281474976715657 SEND to# [1:7592677074365292803:2746] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:44:51.066485Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592677074365291976:2118] Handle TEvProposeTransaction 2026-01-07T17:44:51.066504Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592677074365291976:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-07T17:44:51.066531Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592677074365291976:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7592677078660260138:2782] 2026-01-07T17:44:51.069067Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592677078660260138:2782] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-07T17:44:51.069117Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592677078660260138:2782] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-07T17:44:51.069130Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7592677078660260138:2782] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-07T17:44:51.069176Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7592677078660260138:2782] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:44:51.069443Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7592677078660260138:2782] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:44:51.069542Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7592677078660260138:2782] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 
72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:44:51.069611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7592677078660260138:2782] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-07T17:44:51.069781Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7592677078660260138:2782] txid# 281474976715658 HANDLE EvClientConnected 2026-01-07T17:44:51.070188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:44:51.074246Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7592677078660260138:2782] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-07T17:44:51.074283Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7592677078660260138:2782] txid# 281474976715658 SEND to# [1:7592677078660260137:2781] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-07T17:44:51.367546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:53.339753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677087250194884:2331], DatabaseId: /dc-1, PoolId: default, Failed ... 
AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 23], version: 4 2026-01-07T17:48:31.503513Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 23] was 1 2026-01-07T17:48:31.503634Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.503697Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.503711Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2026-01-07T17:48:31.503725Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 27], version: 4 2026-01-07T17:48:31.503739Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 27] was 1 2026-01-07T17:48:31.503794Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 2026-01-07T17:48:31.503920Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504022Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504062Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504096Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504130Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504165Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504214Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504252Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504286Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504321Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504353Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504387Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504422Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504465Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504500Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504534Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504563Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504600Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504633Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504666Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504700Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504731Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504763Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504797Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504830Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504862Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.504894Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.505617Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.505701Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.505741Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.505777Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 2026-01-07T17:48:31.900776Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [59:7592677994153879882:2118] Handle TEvNavigate describe path /dc-1/tenant-db 2026-01-07T17:48:31.900887Z node 59 :TX_PROXY DEBUG: describe.cpp:270: Actor# [59:7592678024218652794:3508] HANDLE EvNavigateScheme /dc-1/tenant-db 2026-01-07T17:48:31.901572Z node 59 :TX_PROXY DEBUG: describe.cpp:354: Actor# [59:7592678024218652794:3508] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:31.901789Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7592678024218652794:3508] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2026-01-07T17:48:31.904238Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7592678024218652794:3508] Handle TEvDescribeSchemeResult Forward to# [59:7592678024218652793:3507] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725657 CreateStep: 1767808105600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 38 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 38 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 
30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186224037891 2026-01-07T17:48:31.947963Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2026-01-07T17:48:31.948503Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:48:31.950210Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] Test command err: 2026-01-07T17:48:18.038946Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:48:18.038985Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:48:18.039025Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:18.039045Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:48:18.039074Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:48:18.039153Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:48:18.039979Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: 
DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:48:18.043586Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: 
"/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-07T17:48:37.535880Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-07T17:48:37.536002Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-07T17:48:37.536121Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-07T17:48:37.536174Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:48:37.546893Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:37.546964Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:48:37.547081Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:48:37.547131Z node 9 :CMS DEBUG: 
sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-07T17:48:37.547162Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-07T17:48:37.547192Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-07T17:48:37.547222Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:48:37.547251Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:48:37.547280Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-07T17:48:37.547308Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-07T17:48:37.547867Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.548431Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.548590Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 7800110 2026-01-07T17:48:37.548721Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.548857Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.549010Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.549147Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.549276Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-07T17:48:37.549336Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:48:37.549781Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:51, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:37.549864Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-07T17:48:37.550148Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 6 2026-01-07T17:48:37.550200Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:51 >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:33.873265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:33.873391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:33.873443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:33.873484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:33.873520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:33.873551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:33.873623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:33.873718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:33.875389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:33.875734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:33.962300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:33.962372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:33.975205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:33.975444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:33.975579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:33.981611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:33.981914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:33.982420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:33.982604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:33.984862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:33.985073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:33.986269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:33.986330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:33.986472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:33.986513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:33.986564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:33.986750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:34.121513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122126Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:34.122787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
944, LocalPathId: 10] was 0 2026-01-07T17:48:38.959045Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-07T17:48:38.959069Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-07T17:48:38.959118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-07T17:48:38.959148Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-07T17:48:38.959193Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-07T17:48:38.959220Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-07T17:48:38.959245Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-07T17:48:38.959268Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-07T17:48:38.959295Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-07T17:48:38.959321Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-07T17:48:38.959345Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-07T17:48:38.959369Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-07T17:48:38.959417Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-07T17:48:38.959443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-07T17:48:38.959477Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-07T17:48:38.959519Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-07T17:48:38.959546Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-07T17:48:38.959571Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:48:38.959596Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:48:38.959621Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:48:38.959645Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:48:38.959672Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:48:38.959698Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:48:38.959722Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:48:38.959746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:48:38.959787Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:48:38.959822Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:48:38.961677Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.961777Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.961877Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.961974Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962042Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962154Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962454Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962589Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962914Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.962994Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963179Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963296Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963344Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: 
TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963408Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963632Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963703Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.963990Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964211Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964276Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964334Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964509Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.964557Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:38.970646Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:38.975507Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:38.975592Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:38.975740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:38.975808Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:38.975857Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:38.975966Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:811:2780] sender: [5:867:2058] recipient: [5:15:2062] 2026-01-07T17:48:39.014952Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.015044Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:48:39.089294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.095501Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: 
TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:39.095693Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:39.095730Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:39.096007Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-07T17:48:39.096057Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:39.096105Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:861:2819], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:39.096693Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2026-01-07T17:48:19.472248Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.521560Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:19.521615Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:19.521668Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:19.521715Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:19.538831Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 2026-01-07T17:48:19.541400Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:19.000000Z 2026-01-07T17:48:19.541677Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2026-01-07T17:48:19.563477Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.604716Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.625632Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.636180Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.677431Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.718598Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.749603Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-1" IncludeFrom: true To: "c0000000003Sclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003wclient-1|0000000000000000" IncludeFrom: true To: "c0000000003wclient-1|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-2@\000" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\305\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:20.257018Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:20.300258Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:20.300311Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:20.300354Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:20.300400Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] 
doesn't have tx writes info 2026-01-07T17:48:20.314498Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-07T17:48:20.314706Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:20.314979Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:181:2193] 2026-01-07T17:48:20.315882Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request 2026-01-07T17:48:20.316067Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-07T17:48:20.316216Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-07T17:48:20.316317Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-07T17:48:20.316430Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-07T17:48:20.316490Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2026-01-07T17:48:20.316685Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-07T17:48:20.316751Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-07T17:48:20.316850Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-07T17:48:20.316962Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-07T17:48:20.317099Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-07T17:48:20.317148Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-07T17:48:20.317196Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:48:20.000000Z 2026-01-07T17:48:20.317231Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-07T17:48:20.317338Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-07T17:48:20.317497Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-07T17:48:20.317536Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:20.317572Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-07T17:48:20.317614Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:181:2193] 2026-01-07T17:48:20.317663Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2026-01-07T17:48:20.317714Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:20.317748Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:20.317781Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.317816Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.317848Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.317886Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.317919Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.318039Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:48:20.318201Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:20.328519Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:20.359631Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:20.370161Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.370238Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.370280Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:20.370330Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:20.370363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:20.380673Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:20.401506Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:20.401579Z node ... x events 2026-01-07T17:48:39.577961Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:39.598730Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.620270Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.641293Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.661803Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.682313Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.702862Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.723349Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:39.733805Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.754362Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.774830Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:39.795382Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.815889Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.867811Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.888776Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 Wait tx rollback for tx 0 2026-01-07T17:48:39.889011Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:39.889134Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:39.889177Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:39.889220Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-07T17:48:39.889270Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:39.889302Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:39.889336Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:39.889375Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:39.889433Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait immediate tx complete 2 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 2026-01-07T17:48:39.889639Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-07T17:48:39.889677Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:39.889708Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:39.889744Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:39.889784Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:48:39.889886Z node 5 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2026-01-07T17:48:39.889922Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-07T17:48:39.889959Z node 5 
:PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:39.889990Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:39.890031Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2026-01-07T17:48:39.890508Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:39.901048Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.911872Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:39.912114Z node 5 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:39.912248Z node 5 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2026-01-07T17:48:39.912404Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:39.912453Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:39.912501Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:39.912561Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:39.912596Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:39.912638Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-07T17:48:39.912709Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-07T17:48:39.912750Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-07T17:48:39.912793Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:39.912826Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:39.912862Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:39.913079Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got KV request Got KV request 2026-01-07T17:48:39.913258Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:934: 
[72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:39.913308Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request 2026-01-07T17:48:39.913509Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 4 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 partno 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2026-01-07T17:48:39.913728Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 0 count 45 last offset 0, current partition end offset: 50 2026-01-07T17:48:39.913766Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 4. Send blob request. Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 4 2026-01-07T17:48:39.913955Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:39.934521Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:39.934603Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:39.934729Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:39.934780Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:39.934818Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:39.934852Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:39.934891Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:39.934930Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:39.934980Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood |93.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:47:54.580412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:54.688505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:47:54.707165Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:47:54.707713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:54.707774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:47:55.010181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.010299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.097822Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808071522462 != 1767808071522466 2026-01-07T17:47:55.108749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.154724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.266672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:55.584825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:55.598334Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:55.702577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.749736Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:55.750807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:55.753470Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:47:55.753693Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:55.794152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:55.794907Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:55.795019Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:55.797292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:47:55.797392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:47:55.797453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:47:55.799863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:55.800023Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:55.800120Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:47:55.810928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:47:55.840753Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:47:55.841218Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:47:55.841348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:47:55.841379Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:47:55.841420Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:47:55.841467Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:55.841706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.843024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.844211Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:47:55.844315Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:47:55.844387Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:47:55.844442Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:47:55.844509Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:47:55.844546Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:47:55.844578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:47:55.844613Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:47:55.844658Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:47:55.844783Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.844836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.844881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:47:55.845289Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:47:55.845328Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:47:55.845427Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:55.845720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:47:55.845780Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:47:55.845858Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:47:55.845964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:47:55.845995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:47:55.846025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:47:55.846058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:47:55.846330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:47:55.846364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:47:55.846413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:47:55.846443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:47:55.846478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:47:55.846498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:47:55.846522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:47:55.846546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:47:55.846562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:47:55.847880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:47:55.847929Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:47:55.858762Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.896256Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1167:2975], serverId# [10:1168:2976], sessionId# [0:0:0] 2026-01-07T17:48:39.896331Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1166:2974], Recipient [10:941:2796]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-07T17:48:39.896976Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1171:2979], Recipient [10:941:2796]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.897008Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.897034Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1170:2978], serverId# [10:1171:2979], sessionId# [0:0:0] 2026-01-07T17:48:39.897146Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1169:2977], Recipient [10:941:2796]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-07T17:48:39.897242Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-07T17:48:39.897269Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:48:39.897292Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-07T17:48:39.897320Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2026-01-07T17:48:39.897365Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-07T17:48:39.897401Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2026-01-07T17:48:39.897425Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:39.897450Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-07T17:48:39.897495Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2026-01-07T17:48:39.897527Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-07T17:48:39.897552Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:39.897576Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2026-01-07T17:48:39.897603Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2026-01-07T17:48:39.897659Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-07T17:48:39.897781Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[10:1169:2977], 1002} after executionsCount# 1 2026-01-07T17:48:39.897819Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[10:1169:2977], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:39.897861Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[10:1169:2977], 1002} finished in read 2026-01-07T17:48:39.897891Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-07T17:48:39.897909Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2026-01-07T17:48:39.897927Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T17:48:39.897945Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2026-01-07T17:48:39.897975Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-07T17:48:39.897991Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T17:48:39.898008Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037890 has finished 2026-01-07T17:48:39.898026Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-07T17:48:39.898076Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-07T17:48:39.898449Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1174:2982], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.898474Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.898497Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 
72075186224037891, clientId# [10:1173:2981], serverId# [10:1174:2982], sessionId# [0:0:0] 2026-01-07T17:48:39.898564Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1172:2980], Recipient [10:936:2792]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-07T17:48:39.899083Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1177:2985], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.899123Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:39.899161Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1176:2984], serverId# [10:1177:2985], sessionId# [0:0:0] 2026-01-07T17:48:39.899332Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1175:2983], Recipient [10:936:2792]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-07T17:48:39.899410Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-07T17:48:39.899437Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T17:48:39.899459Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-07T17:48:39.899488Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2026-01-07T17:48:39.899528Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-07T17:48:39.899547Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2026-01-07T17:48:39.899564Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:39.899584Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-07T17:48:39.899613Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2026-01-07T17:48:39.899635Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-07T17:48:39.899654Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:39.899670Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2026-01-07T17:48:39.899686Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2026-01-07T17:48:39.899727Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } 
Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-07T17:48:39.899799Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[10:1175:2983], 1003} after executionsCount# 1 2026-01-07T17:48:39.899825Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[10:1175:2983], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:39.899861Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[10:1175:2983], 1003} finished in read 2026-01-07T17:48:39.899891Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-07T17:48:39.899934Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2026-01-07T17:48:39.899955Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2026-01-07T17:48:39.899973Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2026-01-07T17:48:39.900003Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-07T17:48:39.900021Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2026-01-07T17:48:39.900036Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037891 has finished 2026-01-07T17:48:39.900052Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-07T17:48:39.900097Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.864451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.864515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.878618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.878893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.880470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.888141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.888478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.892281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.894388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.902587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.903973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.912866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.043073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.043954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.044353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:48:33.044392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... d__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:48:37.454979Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.457330Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.457559Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:37.457598Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.457826Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.457864Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.457908Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:37.458295Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:48:37.458495Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.458549Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.461899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.463138Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.463339Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.463387Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.467294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.468706Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.469048Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:37.469211Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2026-01-07T17:48:37.469614Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm6fwXaZ7Oi/txy+aSj9X\nyK07OApO1KqLBG/zAqAnDrfgt2LYDfn01NuvJLeE5WLWuO3Q7SzlwVrPCMQhBJAX\nER4Dyh7wkH22Q5t5RYbh9H5rJfCraEtNkz9bXCFrHDJ5/CRtUp1g/ti2CERXyfGR\nuSsoWiLYgUG/VIG59qWP0Q4VgEeyRX905uzbB3pSyrDHjbepy93SXg0OLUPKshCk\nXlE3EvmBOTSn+CcHvz/Ro65fVe/Tpnnve6BTQemp99Soi7gw5AYFul4O+yLfzr6T\nrIWcPFcq4OvsUZQQvL+Jd1OONQcVgC8MA6Plmg0ucdslrarutOpGXVpdAWBwHiNT\ntQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894517449 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:41.470262Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.482326Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.484406Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 
2026-01-07T17:48:41.485145Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:41.485468Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.485541Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:41.489470Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.491025Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:41.491371Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.491430Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:41.494735Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.496219Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:41.496552Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.496628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:41.500046Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.504317Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-07T17:48:41.504811Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:41.505000Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 223us result status StatusSuccess 2026-01-07T17:48:41.505397Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm6fwXaZ7Oi/txy+aSj9X\nyK07OApO1KqLBG/zAqAnDrfgt2LYDfn01NuvJLeE5WLWuO3Q7SzlwVrPCMQhBJAX\nER4Dyh7wkH22Q5t5RYbh9H5rJfCraEtNkz9bXCFrHDJ5/CRtUp1g/ti2CERXyfGR\nuSsoWiLYgUG/VIG59qWP0Q4VgEeyRX905uzbB3pSyrDHjbepy93SXg0OLUPKshCk\nXlE3EvmBOTSn+CcHvz/Ro65fVe/Tpnnve6BTQemp99Soi7gw5AYFul4O+yLfzr6T\nrIWcPFcq4OvsUZQQvL+Jd1OONQcVgC8MA6Plmg0ucdslrarutOpGXVpdAWBwHiNT\ntQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894517449 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795363Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.885318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.885398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.899482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.899788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.899967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.905945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.906315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.906962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.907221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.909723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.909906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.912849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.058864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.059973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.060023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.060070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.060116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.060187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.060245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:37.288678Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-07T17:48:37.288843Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:37.288905Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.289100Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.289142Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-07T17:48:37.289645Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:48:37.289745Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-07T17:48:37.289787Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-07T17:48:37.289827Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-07T17:48:37.289874Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:37.289968Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-07T17:48:37.291557Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-07T17:48:37.291890Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.291930Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:48:37.329775Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.332864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.333131Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-07T17:48:37.333180Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.333503Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.333555Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.333600Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:37.334109Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:48:37.334365Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.334445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.338938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.340755Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.341042Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.341122Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.345802Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.347534Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.347821Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.347899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:37.364303Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:37.366189Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:37.369047Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: false } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:48:37.369526Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-07T17:48:37.369627Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:48:37.369665Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:48:37.369712Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:48:37.369746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:48:37.369801Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:48:37.369864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:48:37.369906Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:48:37.369944Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:48:37.369978Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 1, subscribers: 0 2026-01-07T17:48:37.370013Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-07T17:48:37.372192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSuccess TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:37.372308Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-07T17:48:37.372486Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:37.372527Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:37.372714Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:37.372759Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:207:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-07T17:48:37.373303Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:48:37.373431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 
2026-01-07T17:48:37.373476Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:48:37.373519Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-07T17:48:37.373566Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:48:37.373663Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:48:37.375339Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:48:41.375961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:41.376079Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: account is blocked, at schemeshard: 72057594046678944 >> GenericFederatedQuery::ClickHouseManagedSelectAll >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::IcebergHadoopSaSelectAll |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow [GOOD] >> DataShardWrite::LocksBrokenStats >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] Test command err: 2026-01-07T17:48:17.956248Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.024415Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:18.024492Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:18.024554Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:18.024613Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:18.042889Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 
2026-01-07T17:48:18.044770Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:48:18.000000Z 2026-01-07T17:48:18.045024Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2026-01-07T17:48:18.066392Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.107771Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.128748Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.139381Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.181250Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.224850Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.255858Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:18.407578Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:18.430117Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: 
"\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:18.472460Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 2026-01-07T17:48:18.472557Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:18.743896Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.788688Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:18.788751Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:18.788794Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:18.788844Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:18.803969Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:182:2194] 2026-01-07T17:48:18.805434Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:48:18.000000Z 2026-01-07T17:48:18.805608Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:182:2194] 2026-01-07T17:48:18.816140Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.847279Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.868102Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.899124Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.930106Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:18.961118Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.033117Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.064166Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\275\313\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:48:19.212163Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.523785Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:19.554457Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] 
doesn't have tx info 2026-01-07T17:48:19.554499Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:19.554530Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:19.554564Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:19.564438Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-07T17:48:19.564597Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:19.564828Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [3:180:2193] 2026-01-07T17:48:19.565504Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Go ... 026-01-07T17:48:42.499255Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:42.499306Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.499357Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:42.499400Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:42.499457Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:42.499619Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-07T17:48:42.499664Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:42.499705Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:48:42.499750Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 26 affect SourceId src1 2026-01-07T17:48:42.499807Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:42.499836Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.499897Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.499962Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.500012Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:42.500082Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-07T17:48:42.500118Z node 6 :PERSQUEUE 
DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:42.500156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:42.500186Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.500208Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.500235Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.500256Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait for no tx committed 2026-01-07T17:48:42.500389Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2026-01-07T17:48:42.500428Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:42.500472Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:42.500534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.500571Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.500612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:48:42.500656Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 26 2026-01-07T17:48:42.500715Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-07T17:48:42.500777Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 26 2026-01-07T17:48:42.500825Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:42.500875Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:42.500916Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-07T17:48:42.521834Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.542378Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.562996Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:48:42.583689Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.614859Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.636227Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.646560Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:42.667473Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.688035Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.709721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.730270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events Send disk status response with cookie: 0 Wait tx committed for tx 26 2026-01-07T17:48:42.730523Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:42.730656Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-07T17:48:42.730711Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:42.730762Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-07T17:48:42.730815Z node 6 :PERSQUEUE DEBUG: partition.cpp:1608: [72057594037927937][Partition][0][StateIdle] TxId (empty maybe) affect SourceId src1 2026-01-07T17:48:42.730901Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:48:42.730946Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.730983Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-07T17:48:42.731018Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-07T17:48:42.731066Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-07T17:48:42.731126Z node 6 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-07T17:48:42.731163Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:48:42.731193Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:48:42.731238Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:42.731385Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got batch complete: 1 Got KV request Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 28 2026-01-07T17:48:42.731580Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:42.741864Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:42.762766Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:48:42.762855Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:48:42.762985Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-07T17:48:42.763056Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:42.763130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:42.763185Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:42.763231Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:42.763276Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:42.763336Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> 
TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::ReadFinish >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TGRpcStreamingTest::ClientNeverWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.887024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.887094Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.901703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.901985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.902134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.907697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-07T17:48:32.907942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.908425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.908573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.910573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.910699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.912743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.076146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.077892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:48:33.077967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.078024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.078122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.078196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6678944 2026-01-07T17:48:39.592117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.594946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:39.595147Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:39.595209Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:39.595574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:39.595631Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:39.595673Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:39.596115Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:48:39.596323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.596404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:39.599768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.601215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:39.601462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.601536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:39.607114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.608716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:39.609077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.609167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:39.612362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.613867Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 
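
The ut_login output around this point traces TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock: several TTxLoginFinalize transactions complete with "Invalid password", the following TTxLogin is rejected with "User user1 login denied: too many failed password attempts", and a later attempt (at 17:48:43, after the lockout window) completes with no errors. Purely as an illustration of that behaviour — not the schemeshard implementation — here is a minimal sketch of an account-lockout tracker with automatic unlock; the class name, attempt threshold and lock duration are hypothetical.

```cpp
// Illustrative sketch only: a per-user lockout counter that unlocks itself
// after a fixed duration. Names and thresholds are hypothetical, not YDB's.
#include <chrono>
#include <string>
#include <unordered_map>

class TLockoutTracker {
public:
    TLockoutTracker(size_t maxAttempts, std::chrono::seconds lockDuration)
        : MaxAttempts(maxAttempts), LockDuration(lockDuration) {}

    // Returns false while the user is locked out ("too many failed password attempts").
    bool IsLoginAllowed(const std::string& user, std::chrono::steady_clock::time_point now) {
        auto it = State.find(user);
        if (it == State.end()) {
            return true;
        }
        auto& s = it->second;
        if (s.Failures >= MaxAttempts) {
            if (now - s.LastFailure >= LockDuration) {
                s.Failures = 0;   // lockout period elapsed: unlock automatically
                return true;
            }
            return false;
        }
        return true;
    }

    void OnLoginResult(const std::string& user, bool success, std::chrono::steady_clock::time_point now) {
        if (success) {
            State.erase(user);    // successful login resets the counter
        } else {
            auto& s = State[user];
            ++s.Failures;
            s.LastFailure = now;
        }
    }

private:
    struct TUserState {
        size_t Failures = 0;
        std::chrono::steady_clock::time_point LastFailure{};
    };

    size_t MaxAttempts;
    std::chrono::seconds LockDuration;
    std::unordered_map<std::string, TUserState> State;
};
```

In the log the unlock needs no explicit reset: the successful TTxLoginFinalize at 17:48:43 simply follows the rejected attempts at 17:48:39 once the lockout period has passed, which is what the time check in IsLoginAllowed models.
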
2026-01-07T17:48:39.614174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.614252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-07T17:48:39.614499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:39.614565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-07T17:48:39.614875Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:39.615059Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess 2026-01-07T17:48:39.615473Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtkyFUfhOj2fDBlWkUlvT\nYjmn/H1TNoLNrkbnUvTvITHLDkg5vGjV55cDLR3vT/25SdXwiMFdnhk94I2pH1Wd\negNMolO85au6V/wQATEOltCgheRJFNLuVRmf6Mb82JjOOrNtv/lEH6kzyJIUWiyj\nz+fv2Xn30Vd7knf6WF34TLQECmc7u5gOmPbUfTg2bS4cw/TN8mgosyfL4f4R/ub/\nstlyY1C+47gHaxS/bzl32JhG+1UZiXKQxIhY+rCz9bqaqqFDMmeQoloNDJhj/VPh\nyYIGgGukauEg4bxezyI68kGbPW7JxfbPKoxmPOi0yfT0VTAf0uNy31mhC/zSzoHs\nYwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894519588 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 
255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:48:43.616412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:43.621821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:43.626094Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:43.626731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-07T17:48:43.627248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:43.627377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:43.632451Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:43.638654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-07T17:48:43.639284Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:43.639538Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 298us result status StatusSuccess 2026-01-07T17:48:43.640080Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtkyFUfhOj2fDBlWkUlvT\nYjmn/H1TNoLNrkbnUvTvITHLDkg5vGjV55cDLR3vT/25SdXwiMFdnhk94I2pH1Wd\negNMolO85au6V/wQATEOltCgheRJFNLuVRmf6Mb82JjOOrNtv/lEH6kzyJIUWiyj\nz+fv2Xn30Vd7knf6WF34TLQECmc7u5gOmPbUfTg2bS4cw/TN8mgosyfL4f4R/ub/\nstlyY1C+47gHaxS/bzl32JhG+1UZiXKQxIhY+rCz9bqaqqFDMmeQoloNDJhj/VPh\nyYIGgGukauEg4bxezyI68kGbPW7JxfbPKoxmPOi0yfT0VTAf0uNy31mhC/zSzoHs\nYwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894519588 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionScaleManagerGraphCmpTest::Equal [GOOD] >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInSourceTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] >> TGRpcStreamingTest::ClientDisconnects >> PQCountersLabeled::PartitionBlobCompactionCounters [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] Test command err: 2026-01-07T17:48:17.795475Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677962346982430:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:17.797021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:17.823822Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677964174328677:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:17.829797Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:17.830006Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:17.837416Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:17.940949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:17.976449Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:18.028868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:18.028993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.029806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:18.029879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.031584Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:48:18.031716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.032290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.115655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:18.141219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:18.150003Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:18.172505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003870/r3tmp/yandexwdAGEO.tmp 2026-01-07T17:48:18.172556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003870/r3tmp/yandexwdAGEO.tmp 2026-01-07T17:48:18.172789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003870/r3tmp/yandexwdAGEO.tmp 2026-01-07T17:48:18.172971Z node 
1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:18.200777Z INFO: TTestServer started on Port 21412 GrpcPort 18699 PQClient connected to localhost:18699 2026-01-07T17:48:18.359653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:18.437116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:18.799541Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:18.834841Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:20.424105Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677977059230959:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:20.424119Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677977059230950:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:20.424175Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:20.424343Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592677977059230965:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:20.424378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:20.428620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:20.440133Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592677977059230964:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:48:20.543728Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592677977059230993:2181] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:20.831199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:20.836788Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677975231885739:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:20.837301Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzU4YzBhZWMtNGFkYWUyNGEtZGVjZjE1MTQtYjU0NzcwOTU=, ActorId: [1:7592677975231885699:2329], ActorState: ExecuteState, LegacyTraceId: 01kecs5t7cf1nmke0tzs9gwrfk, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:48:20.838419Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677977059231007:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:20.838701Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZTFmY2JjOC0xZTlhMzczMC1iZjVmMzYxNi0zODRlZGI2ZQ==, ActorId: [2:7592677977059230947:2304], ActorState: ExecuteState, LegacyTraceId: 01kecs5t264p4wsgdpmyn32qza, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:48:20.839529Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:48:20.839539Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' be ... ror=timeout; === CheckClustersList. 
Ok 2026-01-07T17:48:40.344101Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:40.726662Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:48:41.095734Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:41.573801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:48:41.892703Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:48:42.200621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1767808122620, 1767808122620, 0, 13); 2026-01-07T17:48:42.714949Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:48:42.714997Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:48:42.715007Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: 
--!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:48:42.715025Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-07T17:48:42.715125Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7592678071664361090:4115], Recipient [3:7592678063074425654:3597]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7592678071664361089:4115] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-07T17:48:42.715194Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7592678071664361089:4115], Recipient [3:7592678063074425654:3597]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2026-01-07T17:48:42.715243Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [3:7592678063074425654:3597], Recipient [3:7592678071664361089:4115]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-07T17:48:42.715270Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-07T17:48:42.715330Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7592678071664361089:4115], Recipient [3:7592678063074425654:3597]: NActors::TEvents::TEvPoison 2026-01-07T17:48:42.715369Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [3:7592678020124750397:2073], Recipient [3:7592678071664361089:4115]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-07T17:48:42.715382Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2026-01-07T17:48:42.717765Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [3:7592678020124750614:2269], Recipient [3:7592678071664361089:4115]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=3&id=MzM4MjYwYy0yNjc0YjE4Zi1mYTAwMjM0ZS01OTQ4OTJkMg==" NodeId: 3 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-07T17:48:42.717816Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2026-01-07T17:48:42.853586Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [3:7592678020124750614:2269], Recipient [3:7592678071664361089:4115]: 
NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=MzM4MjYwYy0yNjc0YjE4Zi1mYTAwMjM0ZS01OTQ4OTJkMg==" PreparedQuery: "49c63c36-49934d00-d4129763-c4a21c49" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kecs6fz1fdrq30rav8vg7mfh" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1767808122620 } items { uint64_value: 1767808122620 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS C 2026-01-07T17:48:42.853765Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-07T17:48:42.853793Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-07T17:48:42.853913Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7592678071664361116:4115], Recipient [3:7592678063074425654:3597]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7592678071664361089:4115] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-07T17:48:42.853986Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7592678071664361089:4115], Recipient [3:7592678063074425654:3597]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2026-01-07T17:48:42.854061Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [3:7592678063074425654:3597], Recipient [3:7592678071664361089:4115]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-07T17:48:42.854101Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2026-01-07T17:48:42.854338Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7592678071664361089:4115], Recipient [3:7592678063074425654:3597]: NActors::TEvents::TEvPoison 2026-01-07T17:48:42.935336Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [3:7592678020124750614:2269], Recipient [3:7592678071664361089:4115]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=MzM4MjYwYy0yNjc0YjE4Zi1mYTAwMjM0ZS01OTQ4OTJkMg==" PreparedQuery: "cf134867-9e67ac77-1c74a72d-d345e5bf" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { 
Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 45 2026-01-07T17:48:42.935388Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-07T17:48:42.935428Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2026-01-07T17:48:42.935448Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7592678071664361089:4115] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Received TEvChooseResult: 1 Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2026-01-07T17:48:13.805970Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:48:13.866463Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:13.866533Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:13.866592Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:13.866642Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:48:13.885156Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:13.903787Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:13.904816Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:13.907375Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:13.910704Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:48:13.912494Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:48:13.963895Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:13.964359Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|80cc6800-61231c01-5e43e477-5cce7d89_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:237:2057] recipient: [1:104:2137] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:240:2057] recipient: [1:239:2234] Leader for TabletID 72057594037927937 is [1:241:2235] sender: [1:242:2057] recipient: [1:239:2234] 2026-01-07T17:48:14.123857Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:14.123915Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:14.125047Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.125102Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:14.125728Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:290:2235] 2026-01-07T17:48:14.127682Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:291:2235] 2026-01-07T17:48:14.135356Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:48:14.136110Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:291:2235] 2026-01-07T17:48:14.150946Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-07T17:48:14.151853Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:290:2235] 2026-01-07T17:48:14.152582Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:241:2235] sender: [1:319:2057] recipient: [1:14:2061] Got start offset = 40000 2026-01-07T17:48:14.425150Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-07T17:48:14.468493Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:14.468555Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:14.468607Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.468660Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:181:2057] recipient: [2:14:2061] 2026-01-07T17:48:14.485612Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.486355Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "user1" Generation: 2 Important: true } 2026-01-07T17:48:14.486929Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:187:2142] 2026-01-07T17:48:14.489163Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:187:2142] 2026-01-07T17:48:14.491609Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:188:2142] 2026-01-07T17:48:14.493235Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 
'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:188:2142] 2026-01-07T17:48:14.533884Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:14.534350Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ff6a803d-1bd338b4-719425a5-f946d647_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:234:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:237:2057] recipient: [2:236:2232] Leader for TabletID 72057594037927937 is [2:238:2233] sender: [2:239:2057] recipient: [2:236:2232] 2026-01-07T17:48:14.656047Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:14.656128Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:14.656844Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:14.656898Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:48:14.657538Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:287:2233] 2026-01-07T17:48:14.659557Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:288:2233] 2026-01-07T17:48:14.667878Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:48:14.674095Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:288:2233] 2026-01-07T17:48:14.681940Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:48:14.682683Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:287:2233] 2026-01-07T17:48:14.683323Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:238:2233] sender: [2:318:2057] recipient: [2:14:2061] Got start offset = 40000 2026-01-07T17:48:15.038069Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] L ... 0000000000000000234_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000235_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000236_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000237_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000238_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000239_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000240_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000241_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000242_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000243_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000244_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000245_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000246_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000247_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000248_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000249_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000250_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000251_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000252_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000253_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000254_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000255_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000256_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000257_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000258_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000259_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000260_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000261_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000262_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000263_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000264_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000265_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000266_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000267_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000268_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000269_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000270_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000271_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000272_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000273_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000274_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000275_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000276_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000277_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000278_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000279_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000280_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000281_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000282_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000283_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000284_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000285_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000286_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000287_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000288_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000289_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000290_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000291_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000292_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000293_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000294_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000295_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000296_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000297_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000298_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000299_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000300_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000301_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000302_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000303_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000304_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000305_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000306_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000307_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000308_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000309_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000310_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000311_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000312_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000313_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000314_00000_0000000001_00000? size 28702 2026-01-07T17:48:45.613023Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:45.613071Z node 9 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:48:45.613101Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:45.613134Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:45.613163Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:45.613190Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:45.613217Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:45.613268Z node 9 :PERSQUEUE DEBUG: partition.cpp:757: [72057594037927937][Partition][0][StateIdle] Init complete for topic 'rt3.dc1--asdfgs--topic' Partition: 0 SourceId: sourceid SeqNo: 315 offset: 314 MaxOffset: 315 2026-01-07T17:48:45.613300Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 13 2026-01-07T17:48:45.613333Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 13 2026-01-07T17:48:45.613389Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:45.613885Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:45.614747Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 3 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 partno 0 count 1 size 1024000 endOffset 315 max time lag 0ms effective offset 0 2026-01-07T17:48:45.614981Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 8352628 count 168 last offset 0, current partition end offset: 315 2026-01-07T17:48:45.615012Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-07T17:48:45.615180Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:464: No blob in L1. Partition 0 offset 0 partno 0 count 167 parts_count 0 actorID [9:3737:5380] 2026-01-07T17:48:45.615223Z node 9 :PERSQUEUE DEBUG: read.h:142: [72057594037927937][PQCacheProxy]Reading cookie 3. Have to read 1 of 1 from KV 2026-01-07T17:48:45.615301Z node 9 :PERSQUEUE DEBUG: pq_l2_cache.cpp:223: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 0 partno 0 count 167 parts_count 0 2026-01-07T17:48:45.636617Z node 9 :PERSQUEUE DEBUG: read.h:178: [72057594037927937][PQCacheProxy]Got results. 1 of 1 from KV. Status 1 2026-01-07T17:48:45.636683Z node 9 :PERSQUEUE DEBUG: read.h:195: [72057594037927937][PQCacheProxy]Got results. result 0 from KV. Status 0 2026-01-07T17:48:45.636765Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:398: Prefetched blob in L1. Partition 0 offset 0 count 167 size 8352628 actorID [9:3737:5380] 2026-01-07T17:48:45.636897Z node 9 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). 
Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 167 parts 0 suffix '0' size 8352628 2026-01-07T17:48:45.637150Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-07T17:48:45.651938Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 167 count 11 size 550146 from pos 0 cbcount 11 2026-01-07T17:48:45.657723Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2026-01-07T17:48:45.658531Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 9 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [9:3696:5380] sender: [9:3914:2057] recipient: [9:14:2061] 2026-01-07T17:48:45.659089Z node 9 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [9:3913:5430], now have 1 active actors on pipe Got start offset = 0 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TGRpcStreamingTest::ReadFinish [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:47:54.368967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:54.533873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:47:54.581003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:47:54.581780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:54.581845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:47:55.014295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.014427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.094344Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808071521910 != 1767808071521914 2026-01-07T17:47:55.110393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.153438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.267170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:55.571448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:55.585025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:55.702658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.761859Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:55.763126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:55.763427Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-01-07T17:47:55.763687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:55.773808Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:55.809145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:897:2773]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:55.809485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:55.809715Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-07T17:47:55.809864Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:55.816381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:55.816976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:55.817096Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:55.818456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:47:55.818518Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:47:55.818577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:47:55.818929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:55.819109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:55.819172Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-07T17:47:55.819521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:55.819623Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:55.820681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T17:47:55.820731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T17:47:55.820773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T17:47:55.821020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:55.821108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:55.821161Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-07T17:47:55.833539Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:47:55.869467Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:47:55.869708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:47:55.869829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-07T17:47:55.869913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:47:55.869951Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:47:55.869986Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:55.870336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:897:2773], Recipient [1:897:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.870402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.870484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:47:55.870521Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:47:55.870603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:47:55.870661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-07T17:47:55.870683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T17:47:55.870704Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:47:55.870736Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:47:55.871049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:899:2775], Recipient [1:899:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.871080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.871205Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:47:55.871319Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:47:55.871461Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:47:55.871512Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:47:55.871569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:47:55.871623Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:47:55.871667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:47:55.871706Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:47:55.871752Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:47:55.871797Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-07T17:47:55.871850Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets 
to remove in 72075186224037889 2026-01-07T17:47:55.871908Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:47:55.871928Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:47:55.871946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2026-01-07T17:47:55.871965Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-07T17:47:55.871996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 202 ... k, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:45.258141Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-01-07T17:48:45.258328Z node 11 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2026-01-07T17:48:45.258485Z node 11 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-07T17:48:45.258545Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:48:45.258615Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [11:921:2784], Recipient [11:919:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-07T17:48:45.258635Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:45.258657Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-07T17:48:45.390478Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1165:2968], Recipient [11:919:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:48:45.390660Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:48:45.390738Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-07T17:48:45.390838Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:48:45.390894Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:48:45.390950Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add 
[0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:45.390987Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:45.391028Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-07T17:48:45.391068Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:48:45.391089Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:45.391106Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:48:45.391122Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:48:45.391224Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:48:45.391471Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:48:45.391543Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1165:2968], 0} after executionsCount# 1 2026-01-07T17:48:45.391598Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1165:2968], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:45.391681Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1165:2968], 0} finished in read 2026-01-07T17:48:45.391744Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:48:45.391764Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:48:45.391784Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:45.391805Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:45.391842Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T17:48:45.391864Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:45.391894Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-07T17:48:45.391943Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:48:45.392042Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: 
TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:48:45.392475Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1165:2968], Recipient [11:919:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:48:45.392529Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:48:45.392632Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1165:2968], Recipient [11:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2026-01-07T17:48:45.392696Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:48:45.392731Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-07T17:48:45.392771Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:48:45.392788Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:48:45.392808Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:45.392827Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:48:45.392854Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-07T17:48:45.392876Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:48:45.392901Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:45.392943Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:48:45.392963Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:48:45.393028Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-07T17:48:45.393155Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:48:45.393185Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[11:1165:2968], 1} after executionsCount# 1 2026-01-07T17:48:45.393211Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[11:1165:2968], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:45.393251Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[11:1165:2968], 1} finished in read 2026-01-07T17:48:45.393281Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:48:45.393300Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:48:45.393318Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:48:45.393337Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:48:45.393362Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-07T17:48:45.393378Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:48:45.393395Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-07T17:48:45.393421Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:48:45.393487Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:48:45.394390Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1165:2968], Recipient [11:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:48:45.394431Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> DataShardWrite::LocksBrokenStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2026-01-07T17:48:44.614286Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678079774154296:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.614472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:44.850262Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.897549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.897666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.943234Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678079774154247:2081] 1767808124609198 != 1767808124609201 2026-01-07T17:48:44.950081Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:44.954808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:45.078359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:45.093255Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7db9b3911080] stream accepted Name# Session ok# true peer# ipv6:[::1]:41458 2026-01-07T17:48:45.093636Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7db9b3911080] facade attach Name# Session actor# [1:7592678084069122312:2464] peer# ipv6:[::1]:41458 2026-01-07T17:48:45.093677Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7db9b3911080] facade write Name# Session data# peer# ipv6:[::1]:41458 2026-01-07T17:48:45.093965Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x7db9b3911080] facade write Name# Session data# peer# ipv6:[::1]:41458 grpc status# (0) message# 2026-01-07T17:48:45.094010Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7db9b3911080] write finished Name# Session ok# true peer# ipv6:[::1]:41458 2026-01-07T17:48:45.094060Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-01-07T17:48:45.094555Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7db9b3911080] stream done notification Name# Session ok# true peer# ipv6:[::1]:41458 2026-01-07T17:48:45.094649Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7db9b3911080] write finished Name# Session ok# true peer# ipv6:[::1]:41458 2026-01-07T17:48:45.094700Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7db9b3911080] stream finished Name# Session ok# true peer# ipv6:[::1]:41458 grpc status# (0) message# 2026-01-07T17:48:45.094713Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-01-07T17:48:45.094769Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7db9b3911080] deregistering request Name# Session peer# ipv6:[::1]:41458 (finish done) |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2026-01-07T17:48:44.613872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678080563293072:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.614093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:44.851699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.887024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.887132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.921293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.970311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:45.093270Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7de2253bf280] stream accepted Name# Session ok# true peer# ipv6:[::1]:41742 2026-01-07T17:48:45.093618Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7de2253bf280] facade attach Name# Session actor# [1:7592678084858261067:2460] peer# ipv6:[::1]:41742 2026-01-07T17:48:45.093667Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7de2253bf280] facade read Name# Session peer# ipv6:[::1]:41742 2026-01-07T17:48:45.093746Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7de2253bf280] facade finish Name# Session peer# ipv6:[::1]:41742 grpc status# (0) message# 2026-01-07T17:48:45.094263Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7de2253bf280] stream done notification Name# Session ok# true peer# ipv6:[::1]:41742 2026-01-07T17:48:45.094266Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7de2253bf280] read finished Name# Session ok# false data# peer# ipv6:[::1]:41742 2026-01-07T17:48:45.094310Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7de2253bf280] stream finished Name# Session ok# true peer# ipv6:[::1]:41742 grpc status# (0) message# 2026-01-07T17:48:45.094356Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7de2253bf280] deregistering request Name# Session peer# ipv6:[::1]:41742 (finish done) 2026-01-07T17:48:45.094486Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 2026-01-07T17:48:45.109645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2026-01-07T17:48:44.613288Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678081900327854:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.613363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:44.859443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.886297Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.886392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.902206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.988991Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:45.093351Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7da222f11080] stream accepted Name# Session ok# true peer# ipv6:[::1]:38342 2026-01-07T17:48:45.093747Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7da222f11080] facade attach Name# Session actor# [1:7592678086195295857:2460] peer# ipv6:[::1]:38342 2026-01-07T17:48:45.093781Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7da222f11080] facade read Name# Session peer# ipv6:[::1]:38342 2026-01-07T17:48:45.093910Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7da222f11080] read finished Name# Session ok# true data# peer# ipv6:[::1]:38342 2026-01-07T17:48:45.093948Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2026-01-07T17:48:45.093971Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7da222f11080] facade write Name# Session data# peer# ipv6:[::1]:38342 2026-01-07T17:48:45.094221Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7da222f11080] facade finish Name# Session peer# ipv6:[::1]:38342 grpc status# (0) message# 2026-01-07T17:48:45.094880Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7da222f11080] write finished Name# Session ok# true peer# ipv6:[::1]:38342 2026-01-07T17:48:45.095135Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7da222f11080] stream done notification Name# Session ok# true peer# ipv6:[::1]:38342 2026-01-07T17:48:45.095146Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7da222f11080] stream finished Name# Session ok# true peer# ipv6:[::1]:38342 grpc status# (0) message# 2026-01-07T17:48:45.095209Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7da222f11080] deregistering request Name# Session peer# ipv6:[::1]:38342 (finish done) 2026-01-07T17:48:45.102918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2026-01-07T17:48:44.613289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678079116047973:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.613379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:44.844045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.908695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.908799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.948049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678079116047934:2081] 1767808124609181 != 1767808124609184 2026-01-07T17:48:44.952482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:44.961239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:45.017491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:45.093246Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d97ed20e680] stream accepted Name# Session ok# true peer# ipv6:[::1]:59174 2026-01-07T17:48:45.093592Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d97ed20e680] facade attach Name# Session actor# [1:7592678083411015996:2461] peer# ipv6:[::1]:59174 2026-01-07T17:48:45.093632Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d97ed20e680] facade read Name# Session peer# ipv6:[::1]:59174 2026-01-07T17:48:45.093735Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d97ed20e680] facade write Name# Session data# peer# ipv6:[::1]:59174 2026-01-07T17:48:45.094061Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d97ed20e680] facade finish Name# Session peer# ipv6:[::1]:59174 grpc status# (0) message# 2026-01-07T17:48:45.094366Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d97ed20e680] write finished Name# Session ok# true peer# ipv6:[::1]:59174 2026-01-07T17:48:45.094426Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2026-01-07T17:48:45.094631Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d97ed20e680] stream done notification Name# Session ok# true peer# ipv6:[::1]:59174 2026-01-07T17:48:45.094646Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d97ed20e680] read finished Name# Session ok# false data# peer# ipv6:[::1]:59174 2026-01-07T17:48:45.094688Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d97ed20e680] stream finished Name# Session ok# true peer# ipv6:[::1]:59174 grpc status# (0) message# 2026-01-07T17:48:45.094709Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2026-01-07T17:48:45.094729Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2026-01-07T17:48:45.094737Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d97ed20e680] deregistering request Name# Session peer# ipv6:[::1]:59174 (finish done) >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2026-01-07T17:48:44.948571Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678080715385481:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.949740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:45.120201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:45.120339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:45.142888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:45.159518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:45.167140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:45.179371Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678080715385451:2081] 1767808124946689 != 1767808124946692 2026-01-07T17:48:45.209668Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7ddf3596f880] stream accepted Name# Session ok# true peer# ipv6:[::1]:48900 2026-01-07T17:48:45.209929Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7ddf3596f880] facade attach Name# Session actor# [1:7592678085010353384:2355] peer# ipv6:[::1]:48900 2026-01-07T17:48:45.210538Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7ddf3596f880] stream done notification Name# Session ok# true peer# ipv6:[::1]:48900 2026-01-07T17:48:45.210590Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2026-01-07T17:48:45.210960Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7ddf3596f880] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2026-01-07T17:48:45.211010Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7ddf3596f880] deregistering request Name# Session peer# unknown (finish done) |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |93.6%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2026-01-07T17:48:23.885440Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:23.885583Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:23.904211Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:23.904623Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:23.939757Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:24.302371Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:24.302483Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:24.321381Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:24.321671Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:24.335698Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:24.700408Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:24.700494Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:24.712898Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:24.713030Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:24.747679Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:24.748244Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=8642456413894011199, session=0, seqNo=0) 2026-01-07T17:48:24.748410Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:24.760734Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=8642456413894011199, session=1) 2026-01-07T17:48:24.761281Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:146:2168], cookie=10083969112593305251) 2026-01-07T17:48:24.761395Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:146:2168], cookie=10083969112593305251) 2026-01-07T17:48:25.187275Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.199684Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.550197Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.562317Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.911504Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.923416Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-07T17:48:26.273000Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.284769Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.653563Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.665283Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.013963Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.025695Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.364331Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.375957Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.725079Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.737064Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.087395Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.099050Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.481482Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.493164Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.864003Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.876228Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.237682Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.249722Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.610101Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.622043Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.981656Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.993994Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.385042Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.396772Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.767155Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.779313Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.142148Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.154105Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.513981Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.525673Z node 3 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.886729Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.899010Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.286994Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.299169Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.659750Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.671611Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.033051Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.045219Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.406580Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.418552Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.779069Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.791313Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.165425Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.177320Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.548496Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.560631Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.921562Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.933628Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:35.294148Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:35.306339Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:35.667967Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:35.680204Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:36.083815Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:36.096155Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:36.476011Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:36.489745Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:36.864492Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:36.876325Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.237694Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2026-01-07T17:48:37.249750Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.600213Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:37.612242Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.983356Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:37.995080Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:38.356004Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:38.367839Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:38.729008Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:38.741452Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:39.103946Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:39.115668Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:39.475978Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:39.487591Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:39.878872Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:39.890573Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:40.251911Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:40.263895Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:40.636027Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:40.648121Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:41.009447Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:41.021605Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:41.382793Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:41.394990Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:41.797684Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:41.809554Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:42.170289Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:42.182050Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:42.553244Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:42.565381Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:42.926647Z node 3 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:42.938792Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:43.300710Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:43.312899Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:43.705275Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:43.717297Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:44.078765Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:44.090658Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:44.452193Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:44.464075Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:44.815410Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:44.827643Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:45.168268Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:45.180625Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:45.541652Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:45.553538Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:45.903804Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:45.915586Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:46.255031Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:46.266932Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:46.627841Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:46.639763Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:46.991989Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:47.004360Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:47.409432Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:48:47.409545Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:48:47.421423Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:48:47.432262Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:535:2482], cookie=12722971592981824617) 2026-01-07T17:48:47.432362Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] 
TTxSessionsDescribe::Complete (sender=[3:535:2482], cookie=12722971592981824617) 2026-01-07T17:48:47.846626Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:47.846715Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:47.864171Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:47.864511Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:47.888332Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:47.892717Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=13462090399439257229, path="Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-07T17:48:47.892958Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:48:47.904796Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=13462090399439257229) 2026-01-07T17:48:47.906383Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:146:2168]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:47.906447Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:146:2168], cookie=0) 2026-01-07T17:48:47.906655Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:48:47.906674Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=0) 2026-01-07T17:48:47.948591Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:47.948717Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-07T17:48:47.949019Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:151:2173]) 2026-01-07T17:48:47.949192Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2026-01-07T17:48:48.001279Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskRackGuardFullRack [GOOD] Test command err: 2026-01-07T17:48:17.233576Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:48:17.233652Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:48:17.233751Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:48:17.233805Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:48:17.233862Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:48:17.233965Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:48:17.235656Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN 
Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } Hosts { Name: "node-9" State: UNKNOWN Devices { Name: "pdisk-9-36" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-37" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-38" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-39" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 9 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-10" State: UNKNOWN Devices { Name: "pdisk-10-40" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-41" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-42" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-43" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 10 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-11" State: UNKNOWN Devices { Name: "pdisk-11-44" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-45" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-46" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-47" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 11 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-12" State: UNKNOWN Devices { Name: "pdisk-12-48" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-49" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-50" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-51" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 12 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-13" State: UNKNOWN Devices { Name: "pdisk-13-52" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-53" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-54" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-55" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 13 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-14" State: UNKNOWN Devices { Name: "pdisk-14-56" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-57" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-58" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-59" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 14 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-15" State: UNKNOWN Devices { Name: "pdisk-15-60" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-61" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-15-62" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-63" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 15 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-16" State: UNKNOWN Devices { Name: "pdisk-16-64" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-65" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-66" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-67" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 16 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:48:17.245324Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" G ... 
isk-103.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554681Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-104.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-105.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-106.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-107.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554737Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-108.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-109.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-110.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-111.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554801Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554860Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-84.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-85.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-86.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-87.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554922Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 
22, response# PDiskStateInfo { PDiskId: 88 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-88.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 89 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-89.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 90 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-90.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 91 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-91.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.554985Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-92.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-93.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-94.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-95.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.555049Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-96.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-97.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-98.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-99.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.555108Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 72 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-72.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 73 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-73.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 74 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-74.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 75 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-75.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.555168Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 76 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-76.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 77 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-77.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 78 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-78.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 79 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-79.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.555228Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 80 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-80.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 81 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-81.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-82.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-83.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-07T17:48:44.555261Z node 17 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:48:44.555696Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 27:109, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555731Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 27:110, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555752Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 27:111, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555773Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 19:76, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555793Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 19:77, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555813Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 19:78, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555832Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 19:79, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555850Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 27:108, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:48:44.555873Z node 17 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2026-01-07T17:48:44.556090Z node 17 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle 
TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 20 2026-01-07T17:48:44.556115Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 19:76 2026-01-07T17:48:44.556135Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 19:77 2026-01-07T17:48:44.556149Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 19:78 2026-01-07T17:48:44.556162Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 19:79 2026-01-07T17:48:44.556175Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 27:108 2026-01-07T17:48:44.556186Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 27:109 2026-01-07T17:48:44.556198Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 27:110 2026-01-07T17:48:44.556211Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 27:111 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:47:54.519165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:54.632994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:47:54.650276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:47:54.650755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:54.650811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:47:55.010159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.010286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.100991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808071521303 != 1767808071521307 2026-01-07T17:47:55.117365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.164012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.266772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:55.577388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:55.592604Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:55.702577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.752193Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:55.753270Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:55.753566Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:47:55.753821Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:55.800053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:55.800792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:55.800948Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:55.802712Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:47:55.802807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:47:55.802867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:47:55.803258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:55.803400Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:55.803496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:47:55.814248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:47:55.855289Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:47:55.855540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:47:55.855663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:47:55.855714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:47:55.855752Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:47:55.855786Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:55.856039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.856100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.856463Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:47:55.856581Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:47:55.856677Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:47:55.856723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:47:55.856792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:47:55.856833Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:47:55.856867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:47:55.856906Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:47:55.856970Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:47:55.857128Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.857169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.857211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:47:55.857634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:47:55.857683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:47:55.857801Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:55.858072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:47:55.858145Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:47:55.858242Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:47:55.858304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:47:55.858357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:47:55.858395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:47:55.858431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:47:55.858859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:47:55.858899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:47:55.858949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:47:55.858986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:47:55.859039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:47:55.859071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:47:55.859104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:47:55.859143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:47:55.859173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:47:55.860724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:47:55.860776Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:47:55.871623Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 6715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T17:48:47.393354Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-07T17:48:47.393392Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:48:47.393418Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-07T17:48:47.393460Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:47.393488Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:47.393523Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/0 2026-01-07T17:48:47.393568Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-07T17:48:47.393600Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:48:47.393625Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:47.393650Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T17:48:47.393674Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-07T17:48:47.393697Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:48:47.393718Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T17:48:47.393740Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T17:48:47.393763Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-07T17:48:47.393791Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-07T17:48:47.393887Z node 11 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 
281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-07T17:48:47.393944Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-07T17:48:47.394002Z node 11 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T17:48:47.394072Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:48:47.394113Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T17:48:47.394163Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T17:48:47.394221Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:48:47.394254Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-07T17:48:47.394279Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T17:48:47.394306Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:47.394334Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:47.394373Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T17:48:47.394397Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:47.394421Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-07T17:48:47.394473Z node 11 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T17:48:47.394505Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T17:48:47.394542Z node 11 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T17:48:47.394609Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:47.396115Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [11:70:2117], Recipient [11:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 11 Status: STATUS_NOT_FOUND 2026-01-07T17:48:47.396649Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1108:2914], Recipient [11:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:47.396696Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:47.396772Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1107:2913], 
serverId# [11:1108:2914], sessionId# [0:0:0] 2026-01-07T17:48:47.396954Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [11:1106:2912], Recipient [11:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-07T17:48:47.398193Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1111:2917], Recipient [11:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:47.398269Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:47.398329Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1110:2916], serverId# [11:1111:2917], sessionId# [0:0:0] 2026-01-07T17:48:47.398559Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1109:2915], Recipient [11:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-07T17:48:47.398698Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:48:47.398765Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-07T17:48:47.398824Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-07T17:48:47.398896Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-07T17:48:47.399021Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:47.399072Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:48:47.399142Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:47.399187Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:47.399236Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-07T17:48:47.399284Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:47.399314Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:47.399339Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:48:47.399364Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:48:47.399475Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 
ResultFormat: FORMAT_CELLVEC } 2026-01-07T17:48:47.399767Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1109:2915], 1000} after executionsCount# 1 2026-01-07T17:48:47.399844Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1109:2915], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:47.399938Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1109:2915], 1000} finished in read 2026-01-07T17:48:47.400019Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:47.400049Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:48:47.400073Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:47.400098Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:47.400143Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-07T17:48:47.400165Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:47.400193Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-07T17:48:47.400241Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:48:47.400359Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpScan::ScanDuringSplit10 >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:45:59.812595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:45:59.914322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:45:59.922585Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:45:59.923020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:45:59.923210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:00.337574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:00.438177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:00.438339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:00.475445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:00.563581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:01.436550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:01.436597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:01.436630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:01.437423Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... blocking NKikimr::NSchemeShard::TEvSchemeShard::TEvDescribeScheme from MONITORING_REQUEST to FLAT_SCHEMESHARD_ACTOR cookie 1 2026-01-07T17:46:01.497064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:11.140515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:46:11.140607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-07T17:46:22.100476Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:22.100670Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:22.114945Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:22.117037Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:22.118510Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:467:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:22.118974Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.119166Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:22.120331Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:852:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:22.120809Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:22.121101Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:22.459322Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:22.564489Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.564634Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.565367Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:22.565465Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:22.610570Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:46:22.611128Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.611468Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:22.671846Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:46:22.685058Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:26.591060Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:26.598118Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 3: Status: 2 2026-01-07T17:46:26.598243Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(3)::Execute 2026-01-07T17:46:26.598303Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-07T17:46:26.598400Z node 3 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(3)::Complete 2026-01-07T17:46:26.602467Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(3)::Execute 2026-01-07T17:46:26.602606Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:46:26.602655Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(3) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2026-01-07T17:46:26.602710Z node 3 :HIVE DEBUG: hive_impl.cpp:2891: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 3) 2026-01-07T17:46:26.602763Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [3:1222:2631] 2026-01-07T17:46:26.602818Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(3): waiting 3600.000000s 2026-01-07T17:46:26.603437Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:26.603483Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:26.603523Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:26.604306Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:26.604828Z node 3 :HIVE TRACE: hive_impl.cpp:150: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([3:883:2419]) [3:1222:2631] 2026-01-07T17:46:26.606149Z node 3 :HIVE TRACE: hive_impl.cpp:142: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1710:3000]) [3:1938:3004] 2026-01-07T17:46:26.606355Z node 3 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2026-01-07T17:46:26.617805Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2026-01-07T17:46:26.617920Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-07T17:46:26.618191Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2026-01-07T17:46:26.618271Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 
2026-01-07T17:46:26.618456Z node 3 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2026-01-07T17:46:26.618516Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-07T17:46:26.618660Z node 3 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-07T17:46:26.619230Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 4: Status: 2 2026-01-07T17:46:26.619299Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(4)::Execute 2026-01-07T17:46:26.619333Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-07T17:46:26.619679Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(4)::Execute 2026-01-07T17:46:26.619747Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-07T17:46:26.619779Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(4) DeregisterInDomain ... known -> Disconnected 2026-01-07T17:48:42.686263Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.686452Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.686499Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.686699Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.686769Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.686962Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.687006Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.687160Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.687204Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.687385Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.687431Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.687568Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.687611Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.687748Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.687798Z node 29 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.767807Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2026-01-07T17:48:42.768388Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 31 Cookie 31 2026-01-07T17:48:42.768610Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 32 Cookie 32 2026-01-07T17:48:42.768793Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 33 Cookie 33 2026-01-07T17:48:42.768951Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2026-01-07T17:48:42.769112Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2026-01-07T17:48:42.769248Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2026-01-07T17:48:42.769369Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2026-01-07T17:48:42.769830Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.770749Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.771127Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.771374Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.771629Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.771889Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.772107Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.772319Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.772548Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.829210Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:42.912747Z node 30 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:42.928833Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:42.945675Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.003901Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.021662Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.052757Z node 29 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.083579Z node 33 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.099367Z node 32 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:44.074718Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:44.074772Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:44.074797Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:44.075002Z node 29 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:44.162521Z node 30 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.162635Z node 29 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.162712Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.162792Z node 32 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.162913Z node 33 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.163009Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.163102Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.163188Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:44.163532Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: 
EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-35" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 35 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-36" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 36 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-37" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 37 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-35" reason: "YELLOW-7932-1231c6b1-36" reason: "YELLOW-7932-1231c6b1-37" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-29" type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-29" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-29" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 29 host: "::1" port: 12001 } |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState [GOOD] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> TModifyUserTest::ModifyUser >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 
2026-01-07T17:48:36.232015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678048005079030:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.232119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.500555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.546826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.546890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.572119Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:36.596467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.707268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.861904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.861929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.861985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.178147Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.181800Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.181845Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.183090Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:65457, port: 65457 2026-01-07T17:48:37.183150Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:37.195744Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.237471Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:37.289239Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:37.289761Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:37.289820Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.333319Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.377309Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.378824Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Kzbw (14FBECE0) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.379102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:39.006678Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678060472289334:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:39.006803Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:39.020426Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:39.092543Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:39.099692Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678056177322011:2081] 1767808119005263 != 1767808119005266 2026-01-07T17:48:39.123173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:39.123232Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:39.127430Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:39.155284Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:39.155304Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:39.155318Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:39.155389Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:39.228653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:39.262594Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.264828Z node 2 :TICKET_PARSER 
TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.264853Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.265523Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22338, port: 22338 2026-01-07T17:48:39.265597Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:39.275615Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.317414Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:39.361793Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****AxMg (A72312D3) () has now valid token of ldapuser@ldap 2026-01-07T17:48:41.673762Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678069336400886:2097];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.674414Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:41.682063Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:41.736440Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:41.784055Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:41.784107Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:41.785395Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:41.785437Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:41.785443Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:41.785513Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:41.785564Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:41.869065Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:41.871549Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:41.871569Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:41.872059Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:28627 ldap://localhost:28627 ldap://localhost:11111, port: 28627 2026-01-07T17:48:41.872143Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:42.007953Z node 3 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:42.049467Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:42.097302Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:42.097751Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:42.097789Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:42.145421Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:42.189365Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:42.190635Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****E-2g (EC05D40D) () has now valid token of ldapuser@ldap 2026-01-07T17:48:42.192001Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:44.114025Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678082064438425:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.114081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:44.137844Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.170036Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:44.171249Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678082064438394:2081] 1767808124113289 != 1767808124113292 2026-01-07T17:48:44.222683Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.222746Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.224203Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.231379Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:44.231399Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:44.231405Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:44.231488Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:44.285566Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:44.288025Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:44.288058Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:44.288660Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:65342, port: 65342 2026-01-07T17:48:44.288732Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:44.300344Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:44.341505Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:48:44.389804Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****57lg (B650509B) () has now valid token of ldapuser@ldap 2026-01-07T17:48:44.391227Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.045480Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678092108847491:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:47.045552Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:47.058099Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:47.141697Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:47.144104Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592678092108847464:2081] 1767808127044568 != 1767808127044571 2026-01-07T17:48:47.169713Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:47.169778Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:47.177031Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:47.215889Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:47.215932Z node 5 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:47.215940Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:47.216025Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:47.310401Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:47.310854Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.313122Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:47.313147Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:47.313658Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:8271, port: 8271 2026-01-07T17:48:47.313712Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:47.322649Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:47.365526Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8271. Invalid credentials 2026-01-07T17:48:47.366065Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Sgsg (A6A83694) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8271. 
Invalid credentials)' 2026-01-07T17:48:50.371917Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678107819236965:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:50.371950Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:50.384187Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:50.480974Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:50.481826Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678107819236937:2081] 1767808130371329 != 1767808130371332 2026-01-07T17:48:50.503557Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:50.503620Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:50.512187Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:50.542312Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:50.542335Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:50.542342Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:50.542411Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:50.619075Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:50.622317Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:50.622357Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:50.623088Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14170, port: 14170 2026-01-07T17:48:50.623189Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:50.632376Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.673442Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14170. Invalid credentials 2026-01-07T17:48:50.674014Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****_5pw (247D94B3) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14170. 
Invalid credentials)' 2026-01-07T17:48:50.675386Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-07T17:48:37.033175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678050831288206:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:37.033259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:37.223241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:37.223367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:37.257624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:37.271901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:37.297394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:37.298559Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678050831288179:2081] 1767808117031766 != 1767808117031769 2026-01-07T17:48:37.344274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:37.344305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:37.344313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:37.344413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.445078Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.447712Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.447740Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.448461Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:25551, port: 25551 2026-01-07T17:48:37.449230Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.454878Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:37.497272Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:37.542159Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****vLyw (E65A7031) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.543112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:39.574718Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678056771950505:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:39.574805Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:39.581726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:39.659283Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:39.661784Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678056771950478:2081] 1767808119573702 != 1767808119573705 2026-01-07T17:48:39.668738Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:39.681495Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:39.681585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:39.692225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:39.726055Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:39.726090Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:39.726097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:39.726192Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:39.775211Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.777547Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.777574Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.778072Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:29174, port: 29174 
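The records above trace the provider's group lookup: an initial bind as the search DN, a user search with filter uid=ldapuser requesting the memberOf attribute, and a follow-up search whose filter uses the matching-rule-in-chain OID 1.2.840.113556.1.4.1941 so that the server expands nested membership itself. A minimal, self-contained sketch of how such filter strings can be composed is shown below; the helper names and the example group DNs are illustrative assumptions and are not taken from ldap_auth_provider.cpp.

// Illustrative sketch (assumed, not the ydb implementation): composing the
// filter strings that appear in the search records above.
#include <iostream>
#include <string>
#include <vector>

// Filter for the initial user lookup, e.g. "uid=ldapuser".
std::string UserFilter(const std::string& login) {
    return "uid=" + login;
}

// Filter that asks the server to expand nested membership in one query, e.g.
// "(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net)".
std::string MatchingRuleInChainFilter(const std::string& userDn) {
    return "(member:1.2.840.113556.1.4.1941:=" + userDn + ")";
}

// Disjunction used by the manual tree traversal, e.g.
// "(|(entryDn=<group1>)(entryDn=<group2>))".
std::string EntryDnDisjunction(const std::vector<std::string>& groupDns) {
    std::string filter = "(|";
    for (const auto& dn : groupDns) {
        filter += "(entryDn=" + dn + ")";
    }
    return filter + ")";
}

int main() {
    const std::string userDn = "uid=ldapuser,dc=search,dc=yandex,dc=net"; // DN seen in the log above
    std::cout << UserFilter("ldapuser") << "\n"
              << MatchingRuleInChainFilter(userDn) << "\n"
              << EntryDnDisjunction({"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
                                     "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"})
              << "\n";
}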
2026-01-07T17:48:39.778172Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.793539Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:39.837209Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:39.837672Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:39.837728Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.881204Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.929175Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.929838Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****iQiA (C31D8890) () has now valid token of ldapuser@ldap 2026-01-07T17:48:39.931213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:42.092538Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678073196411008:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.092573Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:42.103617Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:42.172036Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678073196410981:2081] 1767808122091883 != 1767808122091886 2026-01-07T17:48:42.177658Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:42.201929Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.202006Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.212425Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:42.245851Z node 3 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:42.245868Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:42.245872Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:42.245948Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:42.291855Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:42.294456Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:42.294485Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:42.295064Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22340, port: 22340 2026-01-07T17:48:42.295155Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:42.298619Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:42.341433Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****wByw (0BFB373A) () has now valid token of ldapuser@ldap 2026-01-07T17:48:42.357038Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:44.601665Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678079604818668:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:44.601727Z node 4 :METADATA_PROVIDER ... 
ns 2026-01-07T17:48:44.662919Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:44.710853Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.710963Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.714730Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.728531Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:44.728555Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:44.728561Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:44.728627Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:44.788773Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:44.792142Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:44.792164Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:44.792640Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:23376 ldap://localhost:23376 ldap://localhost:11111, port: 23376 2026-01-07T17:48:44.792724Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:44.799277Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:44.841204Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:44.841585Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:44.841622Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:44.885301Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:44.929278Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:44.930201Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****FxCw (A88454AE) () has now valid token of ldapuser@ldap 2026-01-07T17:48:44.931441Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.484591Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678095232514683:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:47.485184Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:47.497263Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:47.541241Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592678095232514651:2081] 1767808127483695 != 1767808127483698 2026-01-07T17:48:47.553735Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:47.558746Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:47.558803Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:47.579325Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:47.624011Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:47.624031Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:47.624037Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:47.624112Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:47.686146Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:47.689788Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:47.689817Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:47.690478Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10411, port: 10411 2026-01-07T17:48:47.690583Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:47.693961Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:48:47.737203Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:47.737610Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:47.737657Z node 5 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:47.781252Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:47.825265Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:47.826147Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****3Kaw (366E072A) () has now valid token of ldapuser@ldap 2026-01-07T17:48:47.827679Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:51.079563Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678108675580878:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:51.079634Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:51.090428Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:51.164573Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:51.165740Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678108675580850:2081] 1767808131078858 != 1767808131078861 2026-01-07T17:48:51.176895Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:51.176996Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:51.194391Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:51.237281Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:51.237307Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:51.237314Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:51.237403Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:51.288384Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:51.290988Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 
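In the run above the provider is configured to read a groupDN attribute rather than memberOf and falls back to the "tree traversal" path: each round issues one search whose filter is a disjunction of (entryDn=...) terms for the groups discovered so far, and the loop stops once a round adds no new parents. The snippet below is a hedged, in-memory sketch of that expansion; the function name, container layout, and sample hierarchy are assumptions, not the provider's actual code.

// Illustrative sketch (assumed): level-by-level expansion of nested groups.
// The directory is mocked as a map from a group DN to its parent group DNs;
// in the real provider each round corresponds to one LDAP search with an
// (|(entryDn=...)...) filter over the current frontier.
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

std::set<std::string> ExpandNestedGroups(
        const std::vector<std::string>& directGroups,
        const std::map<std::string, std::vector<std::string>>& parentsOf) {
    std::set<std::string> all(directGroups.begin(), directGroups.end());
    std::vector<std::string> frontier = directGroups;
    while (!frontier.empty()) {
        std::vector<std::string> next;
        for (const auto& dn : frontier) {
            auto it = parentsOf.find(dn);
            if (it == parentsOf.end()) continue;
            for (const auto& parent : it->second) {
                if (all.insert(parent).second) {
                    next.push_back(parent); // newly discovered parent, expand it next round
                }
            }
        }
        frontier = std::move(next);
    }
    return all;
}

int main() {
    // Hypothetical hierarchy mirroring the DNs seen in the log above.
    std::map<std::string, std::vector<std::string>> parentsOf = {
        {"cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
    };
    for (const auto& group : ExpandNestedGroups(
             {"cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"},
             parentsOf)) {
        std::cout << group << "\n";
    }
}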
2026-01-07T17:48:51.291014Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:51.291571Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:7432, port: 7432 2026-01-07T17:48:51.291663Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:51.298812Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-07T17:48:51.298878Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:7432. Bad search filter 2026-01-07T17:48:51.299034Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****OQSg (B9103FE3) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:7432. Bad search filter)' 2026-01-07T17:48:51.325953Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TSubDomainTest::CreateTablet >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::RequestRestartServicesOk >> TCmsTest::ManagePermissions >> TCmsTest::DynamicConfig >> TCmsTest::WalleRebootDownNode >> TCmsTest::StateStorageTwoRings >> TCmsTest::RequestRestartServicesReject >> TCmsTest::ManualRequestApproval >> TCmsTest::RestartNodeInDownState >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::StateRequest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> TCmsTest::CollectInfo >> TCdcStreamTests::Basic >> TCdcStreamTests::VirtualTimestamps >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TCmsTest::TestKeepAvailableModeScheduled >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageTwoBrokenRings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:47:54.362660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:54.534675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:47:54.581072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:47:54.581717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:47:54.581784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:47:55.010179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:55.010308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:55.089013Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808071521586 != 1767808071521590 2026-01-07T17:47:55.105464Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:55.153477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:55.266755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:47:55.571771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:55.585442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:47:55.702643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:47:55.751815Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:47:55.752841Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:47:55.753389Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:47:55.753669Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:47:55.798904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:47:55.799653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:47:55.799773Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:47:55.801496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:47:55.801574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:47:55.801632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:47:55.802037Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:47:55.802166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:47:55.802251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:47:55.813066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:47:55.839834Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:47:55.841266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:47:55.841478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:47:55.841519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:47:55.841559Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:47:55.841598Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:47:55.841867Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.843091Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:47:55.844288Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:47:55.844396Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:47:55.844501Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:47:55.844591Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:47:55.844655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:47:55.844694Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:47:55.844734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:47:55.844774Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:47:55.844830Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T17:47:55.844973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.845024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:47:55.845087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:47:55.845483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T17:47:55.845525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:47:55.845674Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:47:55.845933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:47:55.845990Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:47:55.846080Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:47:55.846153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:47:55.846209Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:47:55.846248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:47:55.846282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:47:55.846611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:47:55.846661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:47:55.846708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T17:47:55.846753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T17:47:55.846801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T17:47:55.846831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T17:47:55.846865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T17:47:55.846915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T17:47:55.846943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T17:47:55.848354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T17:47:55.848409Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:47:55.859205Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... at 72075186224037889 2026-01-07T17:48:55.369617Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:921:2784], Recipient [12:918:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-07T17:48:55.369679Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:55.369744Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-07T17:48:55.370054Z node 12 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-07T17:48:55.370141Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:918:2782], Recipient [12:921:2784]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2026-01-07T17:48:55.370189Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:48:55.370222Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-01-07T17:48:55.572231Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1165:2968], Recipient [12:918:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T17:48:55.572437Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T17:48:55.572555Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-07T17:48:55.572686Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:48:55.572746Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-07T17:48:55.572805Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:55.572881Z node 12 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T17:48:55.572970Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-07T17:48:55.573032Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:48:55.573074Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:55.573099Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T17:48:55.573135Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-07T17:48:55.573284Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T17:48:55.573605Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:48:55.573679Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[12:1165:2968], 0} after executionsCount# 1 2026-01-07T17:48:55.573762Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[12:1165:2968], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:55.573881Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[12:1165:2968], 0} finished in read 2026-01-07T17:48:55.574003Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:48:55.574037Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T17:48:55.574065Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T17:48:55.574092Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-07T17:48:55.574142Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-07T17:48:55.574169Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T17:48:55.574203Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-07T17:48:55.574258Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T17:48:55.574403Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T17:48:55.574989Z node 12 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1165:2968], Recipient [12:918:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:48:55.575065Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-07T17:48:55.575274Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1165:2968], Recipient [12:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-07T17:48:55.575381Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-07T17:48:55.575436Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-07T17:48:55.575496Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:48:55.575524Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-07T17:48:55.575561Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-07T17:48:55.575590Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-07T17:48:55.575636Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-07T17:48:55.575667Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:48:55.575693Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-07T17:48:55.575716Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-07T17:48:55.575737Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-07T17:48:55.575829Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-07T17:48:55.576027Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-07T17:48:55.576074Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[12:1165:2968], 1} after executionsCount# 1 2026-01-07T17:48:55.576111Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[12:1165:2968], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:48:55.576178Z node 12 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[12:1165:2968], 1} finished in read 2026-01-07T17:48:55.576229Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:48:55.576257Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-07T17:48:55.576282Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-07T17:48:55.576309Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-07T17:48:55.576352Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-07T17:48:55.576375Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-07T17:48:55.576416Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-07T17:48:55.576457Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-07T17:48:55.576540Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-07T17:48:55.577873Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1165:2968], Recipient [12:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:48:55.577930Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] Test command err: 2026-01-07T17:48:36.232587Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678046091214698:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.232644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.519027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.563327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.563421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.589581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.657420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles 
were not loaded 2026-01-07T17:48:36.805497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.861847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.861954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.862058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.241054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:37.334596Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.339620Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.339677Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.341539Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:3191, port: 3191 2026-01-07T17:48:37.341635Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:37.345318Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-01-07T17:48:37.345846Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****UCHg (473A7E90) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2026-01-07T17:48:37.346105Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.346128Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.346939Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:3191, port: 3191 2026-01-07T17:48:37.346997Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:48:37.350949Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-01-07T17:48:37.351096Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****UCHg (473A7E90) () has now retryable error message 'Could not login via LDAP (Could not start TLS. 
Can't contact LDAP server)' 2026-01-07T17:48:39.044100Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678059086655082:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:39.044189Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:39.059152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:39.118905Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:39.120779Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678059086655055:2081] 1767808119042890 != 1767808119042893 2026-01-07T17:48:39.165963Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:39.166062Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:39.172444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:39.201378Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:39.201427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:39.201433Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:39.201505Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:39.288733Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.292073Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.292129Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.292879Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****2gxA (AC53D3B1) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2026-01-07T17:48:39.326173Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.556148Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678066576989020:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.556199Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:41.566600Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:41.641616Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678066576988993:2081] 1767808121555432 != 1767808121555435 2026-01-07T17:48:41.643152Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:41.677260Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:41.677339Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:41.681503Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:41.709711Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:41.709736Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:41.709743Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:41.709813Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:41.824524Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.826261Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:41.828903Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:41.828937Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:41.829469Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****WYkA (3FA6A1F2) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2026-01-07T17:48:43.985106Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678074755954498:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:43.985141Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.996183Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:44.057484Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:44.090636Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:44.090711Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:44.094839Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.114062Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:44.114081Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:44.114091Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:44.114157Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:44.174805Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:44.175754Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:44.178368Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:44.178394Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:44.178921Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****h8XA (7580B765) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2026-01-07T17:48:46.917969Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678089262738367:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:46.918046Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:46.929497Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:46.990504Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:46.991770Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592678089262738340:2081] 1767808126917123 != 1767808126917126 2026-01-07T17:48:47.038464Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:47.038550Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:47.044363Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:47.091299Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:47.091321Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:47.091333Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:47.091415Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:47.099132Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.186210Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:47.188939Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:47.188966Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:47.189471Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****F-3g (AAB4CB88) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2026-01-07T17:48:49.952013Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678102648126308:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:49.952222Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:49.972095Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:50.044664Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:50.060073Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:50.060166Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:50.085695Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:50.125450Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:50.125493Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:50.125502Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:50.125604Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:50.146749Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:50.278768Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:50.278796Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:50.278845Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****ce8A (FB83FB66)) 2026-01-07T17:48:50.957496Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:51.278676Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:51.278713Z 
node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root 2026-01-07T17:48:51.279061Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:51.279087Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:51.279956Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:7562, port: 7562 2026-01-07T17:48:51.280075Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:51.337493Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:51.381291Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:51.381799Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:51.381873Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:51.425396Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:51.469325Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:51.470679Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ce8A (FB83FB66) () has now valid token of ldapuser@ldap 2026-01-07T17:48:52.833669Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592678115429539735:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:52.833726Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:52.844449Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:52.913918Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:52.943502Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:52.943569Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:52.948437Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:52.980314Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:52.980334Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:52.980343Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:52.980423Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:53.140339Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:53.221980Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:53.222004Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:53.222035Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****Ahmw (0C6B832E)) 2026-01-07T17:48:53.838562Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:55.836447Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Ahmw (0C6B832E) () has now permanent error message 'Login state is not available' 2026-01-07T17:48:55.836532Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root, |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-07T17:48:36.234667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678047957267565:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.234747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.530495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.544343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.544442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.591620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.642286Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678047957267458:2081] 1767808116229362 != 1767808116229365 
2026-01-07T17:48:36.646669Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:36.828508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.861893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.861909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.861993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.219358Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.223189Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.223235Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.224546Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:15558, port: 15558 2026-01-07T17:48:37.224609Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.285499Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:37.333332Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:37.379588Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****dvDw (B6C172DD) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.379820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:38.728324Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678054081827597:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:38.728385Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:38.740851Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:38.823377Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:38.824885Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678054081827570:2081] 1767808118727325 != 1767808118727328 2026-01-07T17:48:38.860454Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:48:38.860523Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:38.865550Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:38.901360Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:38.901378Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:38.901392Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:38.901450Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:38.977219Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:38.979968Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:38.980003Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:38.980703Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:30375, port: 30375 2026-01-07T17:48:38.980793Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.033447Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:39.077825Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****yJdA (22FDF649) () has now valid token of ldapuser@ldap 2026-01-07T17:48:39.079077Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.182654Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678066299685002:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.182708Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:41.194273Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:41.259880Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:41.299012Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:41.299090Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:41.300678Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:41.330948Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-07T17:48:41.330971Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:41.330978Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:41.331037Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:41.398912Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.503550Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:41.505850Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:41.505870Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:41.506302Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:22272, port: 22272 2026-01-07T17:48:41.506368Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:41.561526Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:41.605278Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:41.605748Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:41.605796Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:41.653320Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:41.697224Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:41.698035Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a0SA (9070300B) () has now valid token of ldapuser@ldap 2026-01-07T17:48:43.713961Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678077922995430:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:43.714012Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.723963Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cp 
... isconnected 2026-01-07T17:48:47.076914Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:47.100992Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:47.134641Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:47.134665Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:47.134673Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:47.134732Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:47.218426Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.235308Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:47.237855Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:47.237887Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:47.238599Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://qqq:17570 ldaps://localhost:17570 ldaps://localhost:11111, port: 17570 2026-01-07T17:48:47.238715Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:47.297415Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:47.341276Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:47.341820Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:47.341863Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:47.385381Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:47.433339Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:47.434507Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****V5RA (64CA7665) () has now valid token of ldapuser@ldap 2026-01-07T17:48:50.189617Z node 6 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678105496467807:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:50.189659Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:50.235444Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:50.264083Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:50.267950Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678105496467779:2081] 1767808130188917 != 1767808130188920 2026-01-07T17:48:50.281226Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:50.281291Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:50.286791Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:50.323156Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:50.323184Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:50.323193Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:50.323263Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:50.420793Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:50.424323Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:50.424349Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:50.424887Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:9642, port: 9642 2026-01-07T17:48:50.424996Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.481431Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:48:50.525258Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:50.525781Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:50.525828Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:50.573337Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:50.617262Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:48:50.618764Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****brDA (540AB133) () has now valid token of ldapuser@ldap 2026-01-07T17:48:50.619920Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:53.281381Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592678117936495862:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:53.281429Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:53.288810Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:53.380601Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:53.383523Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:53.384890Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7592678117936495835:2081] 1767808133280637 != 1767808133280640 2026-01-07T17:48:53.419996Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:53.420081Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:53.422746Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:53.460979Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:53.461015Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:53.461025Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:53.461111Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:53.587895Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:53.590611Z node 7 
:TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:53.590632Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:53.591124Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:11928, port: 11928 2026-01-07T17:48:53.591183Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:53.641714Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-07T17:48:53.641827Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:11928. Bad search filter 2026-01-07T17:48:53.642479Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****sE3Q (188F87A8) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:11928. Bad search filter)' 2026-01-07T17:48:53.644190Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::LastRefreshTime |93.8%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTest::ManualRequestApproval [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::DropMultipleStreams >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> TCmsTest::TestOutdatedState >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] Test command err: 2026-01-07T17:48:10.005921Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:134:2057] recipient: [1:132:2164] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:134:2057] recipient: [1:132:2164] Leader for TabletID 72057594037927937 is [1:138:2168] sender: [1:139:2057] recipient: [1:132:2164] 2026-01-07T17:48:10.104405Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:10.104479Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:10.105295Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.105929Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:180:2057] recipient: [1:178:2198] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:180:2057] recipient: [1:178:2198] Leader for TabletID 72057594037927938 is [1:184:2202] sender: [1:185:2057] recipient: [1:178:2198] Leader for TabletID 72057594037927937 is [1:138:2168] sender: [1:210:2057] recipient: [1:14:2061] 2026-01-07T17:48:10.134669Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:10.161019Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:208:2220] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:48:10.162734Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2168] 2026-01-07T17:48:10.165464Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:216:2168] 2026-01-07T17:48:10.170104Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:217:2168] 2026-01-07T17:48:10.172024Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:217:2168] 2026-01-07T17:48:10.181813Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:10.182293Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|eea3a3ec-9d0ece34-3a1203a9-ac9f273d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:10.195087Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:10.195517Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8500fa30-9deb427e-37bd68af-702368d3_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:48:10.202303Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:48:10.202715Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2d943f65-cd68d45f-1f114116-32cb82e1_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 2026-01-07T17:48:10.219807Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.251021Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.282445Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.324142Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.335519Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.397624Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.493300Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.524493Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.658763Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.710587Z node 1 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.876899Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:10.908739Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:11.127212Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { 
"user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", ... let_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:46.041007Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:46.279416Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:46.435451Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:46.528188Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:46.777724Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:47.006180Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:47.245303Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:47.381188Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:47.495079Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:47.829835Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:48.048022Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:48.276123Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:48.493937Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:48.504607Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 
2026-01-07T17:48:48.785372Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.003353Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.221583Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.429904Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.481947Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.681004Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:49.929991Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:50.158832Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:50.377203Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:50.502152Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:50.646794Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:50.875275Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:51.115284Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:51.364286Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:51.582826Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:51.658398Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][topic] pipe [6:428:2374] connected; active server actors: 1 2026-01-07T17:48:51.725621Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:52.017672Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:52.266545Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:52.526019Z node 6 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:52.764729Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:52.900529Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:53.034963Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:53.273923Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:53.523121Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:53.782996Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:54.106381Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:54.220249Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:54.406733Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:54.665616Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:54.923842Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:55.203597Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:55.339383Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:55.494641Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:55.743778Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:56.016369Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:56.295712Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:56.492958Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:56.567963Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, 
node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:56.786777Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:57.036091Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:57.275020Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:57.523932Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:57.544741Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:57.793247Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:48:58.021383Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TCmsTest::RequestReplaceDevices >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.795040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.795123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.795148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.795175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.795203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.795232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.795291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-07T17:48:32.795353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.796144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.797744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.864366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.864445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.879314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.879570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.880456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.888313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.888663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.892280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.894296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.902487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.903968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.912570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.912756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.912797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.912871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.913797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:33.036220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.037964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:33.038744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
48:56.813070Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:48:56.813097Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:48:56.813530Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.813787Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.813928Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.814136Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.814396Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.814690Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.815407Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.815763Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.816551Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.816735Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.817158Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.817462Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.817606Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.817720Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.818378Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.818515Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.819155Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.819627Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.819769Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 
2026-01-07T17:48:56.819888Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.820161Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.820281Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.820412Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-07T17:48:56.828211Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:56.833645Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:56.833772Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:56.834600Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:56.834684Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:56.834739Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:56.836052Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:777:2746] sender: [5:835:2058] recipient: [5:15:2062] 2026-01-07T17:48:56.876517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:56.876602Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-07T17:48:56.962274Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-07T17:48:56.962469Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:56.962557Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:56.962814Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:56.962869Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:827:2785], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-07T17:48:56.963586Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 0 2026-01-07T17:48:58.964408Z node 
5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-07T17:48:58.968470Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-07T17:48:58.974260Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-07T17:48:58.974650Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-07T17:48:58.975188Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:48:58.975408Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 239us result status StatusSuccess 2026-01-07T17:48:58.975916Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAs1cz+N2QpMlYxAHNHR2O\neMGWerh0h+Ub0bovpn+2CENPtd0g90jzfcZ9qtv/hL2n/reccy9OxE6jLg+4Q120\nXxFlTcPk1cCzW166qbzzwJcT+mG4A6YmBcGyFASGvcsSskOV5dxWQqM1mfu2LxPe\nYG4Uad0WRuyoNJqexUAtJtzHmdTwJHUJ4dvrzqjqnK5tB+tQ6nCEiRRCs+WTtutO\nv5ciCLgW99boNFzi1IJNNacc7QAJ1wO9mkQ5TXbIGnUL8HgYqXiFdt4CfIZvIfXY\nXmB8vzSHsRpK7mCPkJO8srEUR4nIYKCw2Tj+cSD/k7p53FayUmO0BaagwW4FRYG8\nTQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894534277 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3xqyjQWcV45wgv8wepAs\nGD0Ts5N4do99WjOsNf7RefledD+MlCtfU5KkDDsp5kVIEELUfpMsPYzUI74FUtrj\nDYzD+Pqr08upOmLhgeTQSiTMTdwqUhJURsd3Lc+dFfrlNrpO9SQWfVdC0z+uKxLY\nBKEcLlJNAW4L84vou51NLW2CdVQMAgblCiwgE2geiqywO5uq8/5OW8qT13aG3p0W\n3/wc0+3XL6fdPkH5gH9G/Q1ZRhZFrmaNBZ8G8jJ8M/Q7wmJLk2MbNTJFpLAp8KhA\nqlAYkKI0qz/zCDUKY/qI61iCrLq/zXPin9I3NWDBOTQpccIv28bblAotzjXYGdGj\nZQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894534687 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4uFXEv5a7UOV8QNTAwM/\nu9lz3l5kixPmGiNiPat/L6NFVMgmWnJyMPXAjtd2b4VKyJJ1cJC6P/QN2JUBDSrs\nu1tiHUJElcLjx58fAgtQP6zMmCi4cQrejPWiowdbSS12+V+bS8NEhcStXnAMoqGq\nsHc1u3IW+VBTZjpG+WZblehGIZKYectvRWXRya2JtMXhrbeWQ1QCbFrHzd/WYsxg\nbcqy+FhAkbuiMK+dIcRDNmsSPMYFKa9m3K9xpJxMyM8m0dBmS/C+DfpNHGuTmn0R\nHDrWJcMpW45bfbU7yz30ZEPZt7bfSmPoHx+tzJIhqWwOtJZMZ423ZMTXQ9Vn0iEJ\nJwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767894536959 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TCdcStreamTests::DropMultipleStreams [GOOD] >> TCdcStreamTests::Attributes >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::SchemaChanges >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::RequestRestartServicesNoUser >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] |93.8%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::SamePriorityRequest >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeStateStorage >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::ManageRequestsWrong >> TCdcStreamTests::SchemaChanges [GOOD] >> TCdcStreamTests::RetentionPeriod >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::TestDrainAction >> TModifyUserTest::ModifyUserIsEnabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2026-01-07T17:48:38.507313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678053376742865:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:38.507375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:38.670789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:38.677837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:38.677932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:38.723075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:38.738531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:38.738837Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678053376742838:2081] 1767808118505792 != 1767808118505795 2026-01-07T17:48:38.835175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:38.835198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:38.835212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:38.835329Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:38.902371Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:39.020777Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.024616Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.024676Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.025525Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17736, port: 17736 2026-01-07T17:48:39.026281Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.049163Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:39.093244Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:39.093641Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:39.093691Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.137320Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.185236Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:39.186252Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****xC1g (500F1500) () has now valid token of ldapuser@ldap 2026-01-07T17:48:39.514440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.507597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678053376742865:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:43.507686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:43.512645Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****xC1g (500F1500) 2026-01-07T17:48:43.512811Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17736, port: 17736 2026-01-07T17:48:43.512952Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:43.628896Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:43.629346Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:17736 return no entries 2026-01-07T17:48:43.629633Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****xC1g (500F1500) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:17736 return no entries)' 2026-01-07T17:48:48.514875Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****xC1g (500F1500) 2026-01-07T17:48:49.765258Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678102731917530:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:49.765348Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:49.777887Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:49.851525Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:49.857191Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678102731917503:2081] 1767808129764504 != 1767808129764507 2026-01-07T17:48:49.899941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:49.900022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:49.902973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:49.934406Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:49.934430Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:49.934437Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:49.934522Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:49.948904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:50.014984Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:50.017742Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:50.017771Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 
2026-01-07T17:48:50.018385Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27456, port: 27456 2026-01-07T17:48:50.018514Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.022477Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:50.022826Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. Server is busy 2026-01-07T17:48:50.023007Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****gseQ (317CED40) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. Server is busy)' 2026-01-07T17:48:50.023314Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:50.023348Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:50.023979Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27456, port: 27456 2026-01-07T17:48:50.024023Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.027284Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:50.027624Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. Server is busy 2026-01-07T17:48:50.027793Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****gseQ (317CED40) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. Server is busy)' 2026-01-07T17:48:50.770783Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:51.767669Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****gseQ (317CED40) 2026-01-07T17:48:51.767978Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:51.767999Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:51.769132Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27456, port: 27456 2026-01-07T17:48:51.769195Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:51.772292Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:51.772625Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. 
Server is busy 2026-01-07T17:48:51.772864Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****gseQ (317CED40) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:27456. Server is busy)' 2026-01-07T17:48:54.765427Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678102731917530:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:54.765548Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:55.770480Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****gseQ (317CED40) 2026-01-07T17:48:55.770788Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:55.770819Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:55.771520Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27456, port: 27456 2026-01-07T17:48:55.771591Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:55.775259Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:55.821273Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:55.821796Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:55.821853Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:55.869279Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:55.913339Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:55.914111Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****gseQ (317CED40) () has now valid token of ldapuser@ldap 2026-01-07T17:48:59.773228Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****gseQ (317CED40) 2026-01-07T17:48:59.773299Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27456, port: 27456 2026-01-07T17:48:59.773346Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:59.783220Z node 2 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:59.829163Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:59.829477Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:59.829527Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:59.877192Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:59.925177Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:59.925848Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****gseQ (317CED40) () has now valid token of ldapuser@ldap |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2026-01-07T17:48:22.225504Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:22.225647Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:22.248029Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:22.248145Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:22.277704Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:22.278162Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=1094726172892604589, session=0, seqNo=0) 2026-01-07T17:48:22.278923Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:22.290967Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=1094726172892604589, session=1) 2026-01-07T17:48:22.291216Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=17339941860010685890, session=0, seqNo=0) 
2026-01-07T17:48:22.291294Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:22.302948Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=17339941860010685890, session=2) 2026-01-07T17:48:22.303421Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:146:2168], cookie=1254376044692136130, name="Sem1", limit=1) 2026-01-07T17:48:22.303525Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:48:22.315212Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:146:2168], cookie=1254376044692136130) 2026-01-07T17:48:22.315515Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-07T17:48:22.315640Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-07T17:48:22.315794Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-07T17:48:22.327304Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-01-07T17:48:22.327387Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=222) 2026-01-07T17:48:22.327799Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=6805429603578496912, name="Sem1") 2026-01-07T17:48:22.327861Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=6805429603578496912) 2026-01-07T17:48:22.329180Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:157:2179], cookie=9053311490427942268, name="Sem1") 2026-01-07T17:48:22.329231Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:157:2179], cookie=9053311490427942268) 2026-01-07T17:48:22.751250Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:22.763018Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.102212Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.114229Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.463699Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.475586Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.803705Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.815525Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.175862Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.187778Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.526940Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.538664Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.867602Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.879701Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.208747Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.220448Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.559603Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.571327Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.983343Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.995253Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.364435Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.376254Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.735323Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.747078Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.106249Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.118018Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.466793Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.478551Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.860982Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.873112Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.234053Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.245941Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.596305Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.608069Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.959010Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.970816Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.320760Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.332817Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-07T17:48:29.713899Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.725719Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.086377Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.098275Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.447326Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.458973Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.807945Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.819913Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.170714Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.182582Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.543965Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.555959Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.918185Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.933787Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.289834Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.301839Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.652978Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.664784Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.015832Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.028142Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.423437Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.435760Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.798161Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.810427Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.161860Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.174210Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.525931Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.537898Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.887748Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.899872Z node 1 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Com ... tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:56.086437Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:56.460030Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:56.472272Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:56.829245Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:56.841689Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:57.281811Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:57.293866Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:57.654658Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:57.666761Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:58.028321Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.040314Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:58.404825Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.417698Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:58.778656Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.790691Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.171943Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.184139Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.544477Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.556183Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.916243Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.928050Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.288374Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.300526Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.661843Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.674084Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.046171Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:49:01.046258Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:49:01.046365Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" 
owner link 2026-01-07T17:49:01.058418Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:49:01.069381Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:455:2414], cookie=644247778204344028, name="Sem1") 2026-01-07T17:49:01.069500Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:455:2414], cookie=644247778204344028) 2026-01-07T17:49:01.414886Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:01.414977Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:01.428498Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:01.428602Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:01.462438Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:01.462869Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=4008753245328967307, session=0, seqNo=0) 2026-01-07T17:49:01.462999Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:49:01.474789Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=4008753245328967307, session=1) 2026-01-07T17:49:01.475108Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=15702129241950853138, session=0, seqNo=0) 2026-01-07T17:49:01.475235Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:49:01.487270Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=15702129241950853138, session=2) 2026-01-07T17:49:01.487570Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12768634016679284546, session=0, seqNo=0) 2026-01-07T17:49:01.487700Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-07T17:49:01.499780Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12768634016679284546, session=3) 2026-01-07T17:49:01.500312Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=13248996217733301819, name="Sem1", limit=3) 2026-01-07T17:49:01.500471Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:49:01.512522Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=13248996217733301819) 2026-01-07T17:49:01.512886Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-07T17:49:01.513064Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 
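[Editorial aside, not part of the captured log: the surrounding TKesusTest::TestAcquireSemaphoreViaDecrease trace creates "Sem1" with limit=3, lets session 1 hold count=2 and session 2 hold count=1, leaves session 3 queued, and only grants session 3 (order #3) after session 1 re-acquires with count=1, i.e. shrinks its share. The toy model below reproduces that ordering; the class and variable names are invented and this is not the Kesus tablet code.]

# Toy counting semaphore with per-session counts and a FIFO waiter queue,
# following the behaviour suggested by the "Processing semaphore ... queue:
# next order #N session S" messages in the trace. Illustrative only.
from collections import deque

class ToySemaphore:
    def __init__(self, limit):
        self.limit = limit
        self.owners = {}              # session -> count currently held
        self.waiters = deque()        # (session, requested count), FIFO
        self.next_order = 1

    def _used(self):
        return sum(self.owners.values())

    def _grant_waiters(self):
        while self.waiters and self._used() + self.waiters[0][1] <= self.limit:
            session, count = self.waiters.popleft()
            self.owners[session] = count
            print(f"queue: next order #{self.next_order} session {session}")
            self.next_order += 1

    def acquire(self, session, count):
        if session in self.owners:
            self.owners[session] = count      # owner shrinks (or changes) its count
        elif self._used() + count <= self.limit:
            self.owners[session] = count
            print(f"queue: next order #{self.next_order} session {session}")
            self.next_order += 1
        else:
            self.waiters.append((session, count))
        self._grant_waiters()

sem = ToySemaphore(limit=3)
sem.acquire(1, 2)   # -> order #1 session 1
sem.acquire(2, 1)   # -> order #2 session 2
sem.acquire(3, 1)   # limit reached, session 3 queues
sem.acquire(1, 1)   # session 1 shrinks to 1 -> order #3 session 3

[End of aside; the log continues below.]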
2026-01-07T17:49:01.513262Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-07T17:49:01.513340Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-07T17:49:01.513424Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-07T17:49:01.525074Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-01-07T17:49:01.525141Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-01-07T17:49:01.525163Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2026-01-07T17:49:01.525721Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2180], cookie=8689795955326491258, name="Sem1") 2026-01-07T17:49:01.525816Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2180], cookie=8689795955326491258) 2026-01-07T17:49:01.526121Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2183], cookie=8202164580674875937, name="Sem1") 2026-01-07T17:49:01.526175Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2183], cookie=8202164580674875937) 2026-01-07T17:49:01.526350Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=444, session=1, semaphore="Sem1" count=1) 2026-01-07T17:49:01.526442Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-07T17:49:01.538442Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=444) 2026-01-07T17:49:01.539041Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:166:2188], cookie=10974746254913472865, name="Sem1") 2026-01-07T17:49:01.539141Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:166:2188], cookie=10974746254913472865) 2026-01-07T17:49:01.539608Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2191], cookie=11047782907912131193, name="Sem1") 2026-01-07T17:49:01.539691Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2191], cookie=11047782907912131193) 2026-01-07T17:49:01.550117Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:01.550194Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:01.550550Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:01.550926Z node 5 
:KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:01.596463Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:01.596650Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-07T17:49:01.596706Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-07T17:49:01.596733Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-07T17:49:01.597095Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:208:2221], cookie=18404172718803413576, name="Sem1") 2026-01-07T17:49:01.597183Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:208:2221], cookie=18404172718803413576) 2026-01-07T17:49:01.597706Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:217:2229], cookie=18250178480420421525, name="Sem1") 2026-01-07T17:49:01.597788Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:217:2229], cookie=18250178480420421525) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2026-01-07T17:48:53.368629Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678117544583011:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:53.368705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:53.557678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:53.564489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:53.564627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:53.567823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:53.624493Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:53.625488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678117544582982:2081] 1767808133367256 != 1767808133367259 2026-01-07T17:48:53.756720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592678117544583250:2106] Handle TEvNavigate describe path dc-1 2026-01-07T17:48:53.756775Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592678117544583531:2264] HANDLE EvNavigateScheme dc-1 2026-01-07T17:48:53.756864Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678117544583259:2109], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:53.757020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592678117544583443:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592678117544583259:2109], cookie# 1 2026-01-07T17:48:53.759038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678117544583473:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583470:2206], cookie# 1 2026-01-07T17:48:53.759086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678117544583474:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583471:2206], cookie# 1 2026-01-07T17:48:53.759106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678117544583475:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583472:2206], cookie# 1 2026-01-07T17:48:53.759129Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678117544582950:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583473:2206], cookie# 1 2026-01-07T17:48:53.759160Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678117544582956:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583475:2206], cookie# 1 2026-01-07T17:48:53.759161Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678117544582953:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678117544583474:2206], cookie# 1 2026-01-07T17:48:53.759284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678117544583473:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544582950:2049], cookie# 1 2026-01-07T17:48:53.759308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678117544583474:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544582953:2052], cookie# 1 2026-01-07T17:48:53.759321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678117544583475:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544582956:2055], cookie# 1 2026-01-07T17:48:53.759365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678117544583443:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544583470:2206], cookie# 1 2026-01-07T17:48:53.759386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592678117544583443:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:48:53.759412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678117544583443:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544583471:2206], cookie# 1 2026-01-07T17:48:53.759438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:999: [main][1:7592678117544583443:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:48:53.759479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678117544583443:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678117544583472:2206], cookie# 1 2026-01-07T17:48:53.759496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592678117544583443:2206][/dc-1] Sync cookie mismatch: sender# [1:7592678117544583472:2206], cookie# 1, current cookie# 0 2026-01-07T17:48:53.759513Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592678117544583259:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:48:53.763400Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592678117544583259:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592678117544583443:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:48:53.763544Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592678117544583259:2109], cacheItem# { Subscriber: { Subscriber: [1:7592678117544583443:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:48:53.765340Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592678117544583532:2265], recipient# [1:7592678117544583531:2264], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:48:53.765425Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592678117544583531:2264] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:53.799073Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678117544583259:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:53.799128Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592678117544583259:2109], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-07T17:48:53.799171Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592678117544583259:2109], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-07T17:48:53.799184Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7592678117544583259:2109], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-07T17:48:53.799380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592678117544583533:2266][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:48:53.799815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592678117544583534:2267][/dc-1/.metadata/script_execution_leases] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:48:53.800038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7592678117544583535:2268][/dc-1/.metadata/result_sets] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:48:53.800301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7592678117544582950:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7592678117544583539:2266] 2026-01-07T17:48:53.800318Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7592678117544582950:2049] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-07T17:48:53.800385Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7592678117544582950:2049] Subscribe: subscriber# [1:7592678117544583539:2266], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotificati ... 
ype: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767808139076 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2026-01-07T17:48:59.207890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592678145998729493:2339], recipient# [3:7592678145998729492:2338], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-07T17:48:59.207921Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7592678145998729492:2338] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:59.207966Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7592678145998729492:2338] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-07T17:48:59.208018Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678145998729492:2338] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-07T17:48:59.208036Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7592678145998729492:2338] txid# 281474976710662 SEND to# [3:7592678145998729491:2337] Source {TEvProposeTransactionStatus Status# 5} 2026-01-07T17:48:59.208742Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7592678141703761754:2104] Handle TEvProposeTransaction 2026-01-07T17:48:59.208764Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7592678141703761754:2104] TxId# 281474976710663 ProcessProposeTransaction 2026-01-07T17:48:59.208795Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [3:7592678141703761754:2104] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [3:7592678145998729495:2341] 2026-01-07T17:48:59.210676Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [3:7592678145998729495:2341] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: 
"\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTMzOSwiaWF0IjoxNzY3ODA4MTM5LCJzdWIiOiJ1c2VyMiJ9.RdbINiq9WkYxGRKpRMCnTfA8TSD5HrsiijV5fX1NgSIWy-KD9HaZQKJTiDkWQvhTHBSfy7Drqbjhfr-uEtcmYdCOWiXxISCYoyfV8gC_odk0yKgb2yz4LEXwa8nQUJuhtkbK2FIqi09w7lNYOA_9Jgp-0HibDZfI6hqYCV1tYDWLj-W5CiNgwEbXGnV7C1JvZTbLQ-2Mv-WoAFhnj1EhfS-jzQyVm5mMDcxX5tqzH2neJVsF0d5boZxUNQK-Zq4TETn-53xYV3nKr4m6D6ToE3Fncym29b3jMa3IcLgI-gFJKKvYjp9LHbuGWHduMKYeyG0Dl2aHa-96szQIr07b-g\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2Nzg1MTMzOSwiaWF0IjoxNzY3ODA4MTM5LCJzdWIiOiJ1c2VyMiJ9.**0\000" PeerName: "" 2026-01-07T17:48:59.210733Z node 3 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [3:7592678145998729495:2341] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:48:59.210748Z node 3 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [3:7592678145998729495:2341] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2026-01-07T17:48:59.210784Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [3:7592678145998729495:2341] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:48:59.210848Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7592678141703761831:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:59.210945Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:7592678141703761862:2133][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7592678141703761831:2121], cookie# 10 2026-01-07T17:48:59.211016Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7592678141703761874:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761871:2133], cookie# 10 2026-01-07T17:48:59.211063Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7592678141703761875:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761872:2133], cookie# 10 2026-01-07T17:48:59.211072Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7592678141703761518:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761874:2133], cookie# 10 2026-01-07T17:48:59.211080Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7592678141703761881:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761873:2133], cookie# 10 2026-01-07T17:48:59.211095Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7592678141703761521:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761875:2133], cookie# 10 2026-01-07T17:48:59.211113Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7592678141703761874:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761518:2049], cookie# 10 2026-01-07T17:48:59.211114Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7592678141703761524:2055] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7592678141703761881:2133], cookie# 10 2026-01-07T17:48:59.211138Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7592678141703761875:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761521:2052], cookie# 10 2026-01-07T17:48:59.211159Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7592678141703761881:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761524:2055], cookie# 10 2026-01-07T17:48:59.211204Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7592678141703761862:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761871:2133], cookie# 10 2026-01-07T17:48:59.211227Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:7592678141703761862:2133][/dc-1] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:48:59.211251Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7592678141703761862:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761872:2133], cookie# 10 2026-01-07T17:48:59.211275Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:7592678141703761862:2133][/dc-1] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:48:59.211311Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7592678141703761862:2133][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7592678141703761873:2133], cookie# 10 2026-01-07T17:48:59.211335Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7592678141703761862:2133][/dc-1] Sync cookie mismatch: sender# [3:7592678141703761873:2133], cookie# 10, current cookie# 0 2026-01-07T17:48:59.211357Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7592678141703761831:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:48:59.211423Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7592678141703761831:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7592678141703761862:2133] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767808139076 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:48:59.211492Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7592678141703761831:2121], cacheItem# { Subscriber: { Subscriber: [3:7592678141703761862:2133] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767808139076 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-07T17:48:59.211653Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7592678145998729496:2342], recipient# [3:7592678145998729495:2341], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-07T17:48:59.211694Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7592678145998729495:2341] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:59.211734Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7592678145998729495:2341] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-07T17:48:59.211804Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678145998729495:2341] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-07T17:48:59.211831Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7592678145998729495:2341] txid# 281474976710663 SEND to# [3:7592678145998729494:2340] Source {TEvProposeTransactionStatus Status# 5} |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TKesusTest::TestAcquireSemaphore [GOOD] >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::WalleDisableMaintenance ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2026-01-07T17:48:22.225504Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:22.225635Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:22.251759Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:22.251865Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:22.277699Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 
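[Editorial aside, not part of the captured log: in the TModifyUserTest output above, the SCHEME_BOARD_SUBSCRIBER reports "Sync is in progress ... size# 3, half# 1, successes# 1" and then "Sync is done ... successes# 2": the path sync over the replica ring group completes once a strict majority of replicas has answered. A minimal sketch of that decision rule follows; failure handling is omitted and the function name is invented.]

# Majority-quorum check mirroring the "Sync is in progress / Sync is done"
# subscriber messages above. Illustrative only.
def sync_done(ring_group_size: int, successes: int) -> bool:
    half = ring_group_size // 2
    return successes > half        # strict majority of replicas answered

# size# 3, half# 1: not done at successes# 1, done at successes# 2
for successes in (1, 2, 3):
    print(f"size# 3, successes# {successes} -> done={sync_done(3, successes)}")

[End of aside; the log continues below.]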
2026-01-07T17:48:22.278144Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=16210617579696750201, session=0, seqNo=0) 2026-01-07T17:48:22.278853Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:22.290972Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=16210617579696750201, session=1) 2026-01-07T17:48:22.291228Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=7630216187953224216, session=0, seqNo=0) 2026-01-07T17:48:22.291300Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:48:22.302951Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=7630216187953224216, session=2) 2026-01-07T17:48:22.303645Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:48:22.305332Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:48:22.305417Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:22.306224Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=222, session=2, semaphore="Lock2" count=1) 2026-01-07T17:48:22.306282Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-07T17:48:22.306335Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-07T17:48:22.306395Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=333, session=1, semaphore="Lock2" count=1) 2026-01-07T17:48:22.306428Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-07T17:48:22.317928Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-01-07T17:48:22.317980Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=222) 2026-01-07T17:48:22.317997Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=333) 2026-01-07T17:48:22.318431Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=11091706693475433942, name="Lock1") 2026-01-07T17:48:22.319807Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=11091706693475433942) 2026-01-07T17:48:22.320205Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=9452297025287329795, name="Lock2") 
2026-01-07T17:48:22.320259Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=9452297025287329795) 2026-01-07T17:48:22.333428Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:22.333512Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:22.333844Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:22.334243Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:22.348862Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:22.349015Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:48:22.349050Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-07T17:48:22.349066Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-07T17:48:22.349374Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:193:2206], cookie=4332625800948415348, name="Lock1") 2026-01-07T17:48:22.349431Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:193:2206], cookie=4332625800948415348) 2026-01-07T17:48:22.349907Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:201:2213], cookie=7146723688384909845, name="Lock2") 2026-01-07T17:48:22.349951Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:201:2213], cookie=7146723688384909845) 2026-01-07T17:48:22.813372Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:22.825135Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.184054Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.195566Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.544097Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.555766Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.905843Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.917915Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.288800Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.300437Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.650304Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.662398Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.992868Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.005054Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.356481Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.368752Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.720401Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.732614Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.135858Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.147854Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.508806Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.521068Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.882554Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.894734Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.256097Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.268275Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.628980Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.640776Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.054252Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.066261Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.428028Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.440358Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.802529Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.814773Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.176599Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.188776Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.549369Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.561411Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.954089Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.966353Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.328455Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.340812Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-07T17:48:30.701975Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.714062Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.075782Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.087919Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.448426Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.460542Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.831310Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [720575940379 ... .cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.280177Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:58.640503Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.652438Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.002267Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.014348Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.374768Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.386692Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.746567Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.758747Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.139530Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.151534Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.512481Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.524698Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.885883Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.898708Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.261392Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:01.273770Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.635363Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:01.647090Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:02.030516Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:49:02.030595Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:49:02.030637Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" 
owner link 2026-01-07T17:49:02.030722Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-07T17:49:02.030769Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2026-01-07T17:49:02.030794Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:49:02.042701Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:49:02.043265Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:367:2347], cookie=10485638810497953661, name="Lock1") 2026-01-07T17:49:02.043342Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:367:2347], cookie=10485638810497953661) 2026-01-07T17:49:02.043742Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:370:2350], cookie=15320243178517173006, name="Lock2") 2026-01-07T17:49:02.043799Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:370:2350], cookie=15320243178517173006) 2026-01-07T17:49:02.044124Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:373:2353], cookie=6325899163783139883) 2026-01-07T17:49:02.044188Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:373:2353], cookie=6325899163783139883) 2026-01-07T17:49:02.059167Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:02.059248Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:02.059603Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:02.060074Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:02.106165Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:02.106314Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-07T17:49:02.106371Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:49:02.106761Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:412:2383], cookie=10843679278855229631) 2026-01-07T17:49:02.106834Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:412:2383], cookie=10843679278855229631) 2026-01-07T17:49:02.107392Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2389], cookie=6725170780709952934, name="Lock1") 2026-01-07T17:49:02.107479Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:419:2389], cookie=6725170780709952934) 2026-01-07T17:49:02.107990Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute 
(sender=[4:422:2392], cookie=16440862574025579061, name="Lock2") 2026-01-07T17:49:02.108060Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:422:2392], cookie=16440862574025579061) 2026-01-07T17:49:02.407193Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:02.407282Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:02.420781Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:02.420912Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:02.455127Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:02.455622Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=9940789813181773818, session=0, seqNo=0) 2026-01-07T17:49:02.455764Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:49:02.467656Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=9940789813181773818, session=1) 2026-01-07T17:49:02.467961Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=9441300258358758016, session=0, seqNo=0) 2026-01-07T17:49:02.468068Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:49:02.479808Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=9441300258358758016, session=2) 2026-01-07T17:49:02.480060Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-07T17:49:02.491948Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-01-07T17:49:02.492397Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=15830746584408292225, name="Sem1", limit=1) 2026-01-07T17:49:02.492552Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:49:02.504466Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=15830746584408292225) 2026-01-07T17:49:02.504917Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=1, semaphore="Sem1" count=100500) 2026-01-07T17:49:02.516805Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2026-01-07T17:49:02.517135Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=1, semaphore="Sem1" count=1) 2026-01-07T17:49:02.517272Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-07T17:49:02.517429Z node 5 :KESUS_TABLET 
DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=2, semaphore="Sem1" count=1) 2026-01-07T17:49:02.529283Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-01-07T17:49:02.529359Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2026-01-07T17:49:02.529850Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2182], cookie=3077147372860274549, name="Sem1") 2026-01-07T17:49:02.529928Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2182], cookie=3077147372860274549) 2026-01-07T17:49:02.530303Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2185], cookie=14424044977036856949, name="Sem1") 2026-01-07T17:49:02.530363Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2185], cookie=14424044977036856949) 2026-01-07T17:49:02.530728Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2188], cookie=9730782213125257267, name="Sem1", force=0) 2026-01-07T17:49:02.542604Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2188], cookie=9730782213125257267) 2026-01-07T17:49:02.543029Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:171:2193], cookie=9555872775404143364, name="Sem1", force=1) 2026-01-07T17:49:02.543128Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-07T17:49:02.555176Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:171:2193], cookie=9555872775404143364) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TFlatTest::AutoSplitMergeQueue [GOOD] >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TMaintenanceApiTest::TestDrainAction [GOOD] >> TMaintenanceApiTest::TestCordonAction >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> TTxLocatorTest::TestWithReboot >> TTxLocatorTest::TestAllocateAll |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot >> TTxLocatorTest::TestZeroRange >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> 
TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TTxLocatorTest::TestAllocateAll [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TTxLocatorTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2026-01-07T17:47:35.778027Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677784540501049:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:47:35.781061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:47:35.793992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:47:36.135419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:47:36.135558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:47:36.188026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:47:36.192625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592677784540500948:2081] 1767808055745973 != 1767808055745976 2026-01-07T17:47:36.203190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:47:36.215293Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:36.386035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:47:36.428119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:36.437553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:47:36.447685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:47:36.511186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) A-0 
2026-01-07T17:47:36.806632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; B-0 2026-01-07T17:47:36.981596Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.022s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-07T17:47:37.002252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-07T17:47:37.033485Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.032s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-07T17:47:37.038687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-07T17:47:37.105088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-07T17:47:37.106741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 6291502 row count 1 2026-01-07T17:47:37.106819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2026-01-07T17:47:37.106938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-07T17:47:37.109115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2026-01-07T17:47:37.345375Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.063s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-07T17:47:37.349684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2026-01-07T17:47:37.375130Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.025s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2026-01-07T17:47:37.377172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2026-01-07T17:47:37.453064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-07T17:47:37.453176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583020 row count 2 2026-01-07T17:47:37.453215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583020 2026-01-07T17:47:37.453289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-07T17:47:37.453647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2026-01-07T17:47:37.655707Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.046s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-07T17:47:37.697285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2026-01-07T17:47:37.789359Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.111s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2026-01-07T17:47:37.801152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-07T17:47:37.801348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874522 row count 3 2026-01-07T17:47:37.801422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874522 2026-01-07T17:47:37.801624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:47:37.805091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 599 seconds 2026-01-07T17:47:37.805154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-07T17:47:37.805251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-07T17:47:37.813212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, shardIdx# 
72057594046644480:1 in# 11 ms, with status# 1, next wakeup in# 599.988346s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:47:37.844623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874538 rowCount 3 cpuUsage 0 2026-01-07T17:47:37.945028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-07T17:47:37.945117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874538 row count 3 2026-01-07T17:47:37.945153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874538 2026-01-07T17:47:37.945227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-07T17:47:37.945335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2026-01-07T17:47:38.036570Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=Done, 2 blobs 1r (max 1), put Spent{time=0.039s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-07T17:47:38.061583Z n ... 
KVRow: 218 EraseKVRow: 188 WriteKVRow: 219 EraseKVRow: 189 WriteKVRow: 220 EraseKVRow: 190 WriteKVRow: 221 EraseKVRow: 191 WriteKVRow: 222 EraseKVRow: 192 WriteKVRow: 223 EraseKVRow: 193 WriteKVRow: 224 EraseKVRow: 194 WriteKVRow: 225 EraseKVRow: 195 WriteKVRow: 226 EraseKVRow: 196 WriteKVRow: 227 EraseKVRow: 197 WriteKVRow: 228 EraseKVRow: 198 WriteKVRow: 229 EraseKVRow: 199 WriteKVRow: 230 EraseKVRow: 200 WriteKVRow: 231 EraseKVRow: 201 WriteKVRow: 232 EraseKVRow: 202 WriteKVRow: 233 EraseKVRow: 203 WriteKVRow: 234 EraseKVRow: 204 WriteKVRow: 235 EraseKVRow: 205 WriteKVRow: 236 EraseKVRow: 206 WriteKVRow: 237 EraseKVRow: 207 WriteKVRow: 238 EraseKVRow: 208 WriteKVRow: 239 EraseKVRow: 209 2026-01-07T17:48:38.191289Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:48:38.191327Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded WriteKVRow: 240 EraseKVRow: 210 WriteKVRow: 241 EraseKVRow: 211 WriteKVRow: 242 EraseKVRow: 212 WriteKVRow: 243 EraseKVRow: 213 WriteKVRow: 244 EraseKVRow: 214 WriteKVRow: 245 EraseKVRow: 215 WriteKVRow: 246 EraseKVRow: 216 WriteKVRow: 247 EraseKVRow: 217 WriteKVRow: 248 EraseKVRow: 218 WriteKVRow: 249 EraseKVRow: 219 WriteKVRow: 250 EraseKVRow: 220 WriteKVRow: 251 EraseKVRow: 221 WriteKVRow: 252 EraseKVRow: 222 WriteKVRow: 253 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 2026-01-07T17:48:40.008718Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2026-01-07T17:48:40.008756Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-07T17:48:40.008772Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2026-01-07T17:48:40.008791Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found WriteKVRow: 255 EraseKVRow: 225 2026-01-07T17:48:40.100127Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found 2026-01-07T17:48:40.100204Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2026-01-07T17:48:40.100230Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found WriteKVRow: 256 2026-01-07T17:48:40.113076Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037898 not found EraseKVRow: 226 2026-01-07T17:48:40.198482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037903 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715704:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037903 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\364\000\000\000" } SchemeshardId: 72057594046644480 WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 2026-01-07T17:48:40.266268Z node 3 :TX_DATASHARD WARN: datashard__compaction.cpp:25: Background compaction tx at non-ready tablet 
72075186224037902 state 5, requested from [3:7592677990288629511:2150] 2026-01-07T17:48:40.268521Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715706:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037905 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\344\000\000\000" } SchemeshardId: 72057594046644480 WriteKVRow: 259 2026-01-07T17:48:40.296123Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037903 not found 2026-01-07T17:48:40.297761Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037902 not found 2026-01-07T17:48:40.300712Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715707:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037904 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\333\000\000\000" } SchemeshardId: 72057594046644480 2026-01-07T17:48:40.301199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715708:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037906 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\357\000\000\000" } SchemeshardId: 72057594046644480 2026-01-07T17:48:40.301703Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715709:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037905 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\344\000\000\000" } SchemeshardId: 72057594046644480 2026-01-07T17:48:40.302970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037907 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715710:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037907 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\374\000\000\000" } SchemeshardId: 72057594046644480 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 2026-01-07T17:48:40.383533Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037907 not found EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 
WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 2026-01-07T17:48:41.281544Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037904 not found 2026-01-07T17:48:41.281574Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 2026-01-07T17:48:41.368060Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 2026-01-07T17:48:56.372549Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037906 not found 2026-01-07T17:48:56.376011Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found 2026-01-07T17:48:56.383269Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037910 not found 2026-01-07T17:48:56.383739Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found 2026-01-07T17:48:56.523447Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found 2026-01-07T17:48:56.525056Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found 2026-01-07T17:48:56.640239Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found 2026-01-07T17:48:56.650137Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found 2026-01-07T17:48:56.727171Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2026-01-07T17:48:57.646763Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found 2026-01-07T17:48:57.646819Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found 2026-01-07T17:48:57.649031Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found 2026-01-07T17:48:57.779033Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037918 not found 2026-01-07T17:48:57.779580Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2026-01-07T17:48:57.779607Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2026-01-07T17:48:57.779645Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found 2026-01-07T17:49:02.894717Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found 2026-01-07T17:49:02.894787Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] >> KqpService::CloseSessionsWithLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2026-01-07T17:49:04.533834Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:49:04.537012Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:49:04.539416Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:04.552886Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.557276Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:04.574129Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574257Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574321Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574457Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:49:04.574574Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574663Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:04.574858Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:49:04.577135Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710655 2026-01-07T17:49:04.578968Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.579055Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.579165Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2026-01-07T17:49:04.579209Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2026-01-07T17:49:04.585964Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#1 2026-01-07T17:49:04.586112Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-07T17:49:04.586152Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TCmsTest::BridgeModeStateStorage [GOOD] >> TCmsTest::BridgeModeSysTablets >> TCmsTest::ManageRequestsDry [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test 
command err: 2026-01-07T17:49:04.533814Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:49:04.537011Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:49:04.539376Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:04.552811Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.557272Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:04.573600Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573703Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573758Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573873Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:49:04.574077Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574164Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:04.574313Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2026-01-07T17:49:04.534375Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:49:04.537015Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:49:04.539394Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:04.552825Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.557328Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:04.573959Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574070Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574126Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574246Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:49:04.574392Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574471Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:04.574611Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:49:04.577104Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#0 2026-01-07T17:49:04.578958Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.579027Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.579133Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2026-01-07T17:49:04.579187Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 0 expected SUCCESS |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesDryRun [GOOD] Test command err: 2026-01-07T17:49:00.794990Z node 19 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008415s |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::SysTabletsNode |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesWrongHost [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} 
ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TMaintenanceApiTest::TestCordonAction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2026-01-07T17:49:04.535549Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:49:04.537027Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:49:04.539414Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:04.552832Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.557275Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:04.573687Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573800Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573874Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.573993Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:49:04.574098Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.574180Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:04.574317Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:49:04.576963Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-07T17:49:04.578986Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-07T17:49:04.580466Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-07T17:49:04.580650Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-07T17:49:04.581168Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-07T17:49:04.581453Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-07T17:49:04.581733Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.581934Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.582049Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2026-01-07T17:49:04.582266Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.582394Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.582553Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.582627Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-07T17:49:04.582803Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.582891Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-07T17:49:04.583048Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.583188Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.583272Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-07T17:49:04.583484Z node 1 :TX_ALLOCATOR 
DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-07T17:49:04.583536Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-07T17:49:04.583677Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.583747Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.583840Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-07T17:49:04.583870Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-07T17:49:04.583961Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.584048Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-07T17:49:04.584093Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-07T17:49:04.584208Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.584307Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-07T17:49:04.584328Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-07T17:49:04.584426Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.584499Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-07T17:49:04.584525Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-07T17:49:04.584588Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.584641Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.584697Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-07T17:49:04.584726Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# 
[1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-07T17:49:04.584818Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-07T17:49:04.584843Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-07T17:49:04.584962Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.585054Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.585096Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-07T17:49:04.585138Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-07T17:49:04.585257Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.585330Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.585369Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-07T17:49:04.585391Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-07T17:49:04.585503Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.585589Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-07T17:49:04.585616Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-07T17:49:04.594469Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 7205759404 ... 
1:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.949804Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2026-01-07T17:49:04.949828Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:619:2556] TEvAllocateResult from# 9400000 to# 9500000 2026-01-07T17:49:04.949946Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950023Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950094Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2026-01-07T17:49:04.950126Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:621:2558] TEvAllocateResult from# 9500000 to# 9600000 2026-01-07T17:49:04.950225Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950294Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-07T17:49:04.950331Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:623:2560] TEvAllocateResult from# 9600000 to# 9700000 2026-01-07T17:49:04.950427Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950468Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2026-01-07T17:49:04.950494Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:625:2562] TEvAllocateResult from# 9700000 to# 9800000 2026-01-07T17:49:04.950661Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950710Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.950828Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-07T17:49:04.950856Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:627:2564] TEvAllocateResult from# 9800000 to# 9900000 2026-01-07T17:49:04.950944Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.951002Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete 
Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-07T17:49:04.951030Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:629:2566] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-07T17:49:04.955194Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2026-01-07T17:49:04.956441Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:876: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2026-01-07T17:49:04.957200Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:421: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2026-01-07T17:49:04.957263Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2026-01-07T17:49:04.957422Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-07T17:49:04.957481Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-07T17:49:04.957528Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.957586Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-07T17:49:04.957625Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.957709Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.957754Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.957820Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2026-01-07T17:49:04.957882Z 
node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.957921Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.958082Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:632: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2026-01-07T17:49:04.958118Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-07T17:49:04.958208Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-07T17:49:04.958238Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.958260Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-07T17:49:04.958283Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2026-01-07T17:49:04.958318Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2026-01-07T17:49:04.958352Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2026-01-07T17:49:04.958383Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2026-01-07T17:49:04.958421Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2026-01-07T17:49:04.958453Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2026-01-07T17:49:04.958487Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2026-01-07T17:49:04.958736Z node 1 :TABLET_MAIN DEBUG: 
tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:04.959976Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.963449Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:04.963656Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:04.964402Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:49:04.964466Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1641:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.964540Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:04.964605Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TCmsTest::TestProcessingQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ManageRequestsDry [GOOD] Test command err: 2026-01-07T17:48:56.738413Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-07T17:48:56.739254Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-07T17:48:56.739303Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2026-01-07T17:48:57.097885Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-07T17:48:57.098634Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-07T17:48:57.098666Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] Test command err: 2026-01-07T17:49:01.808182Z node 20 :CMS 
ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-07T17:49:01.808294Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-07T17:49:03.978060Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2026-01-07T17:49:03.978153Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 13976, MsgBus: 11980 2026-01-07T17:48:49.603994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678100147362791:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:49.604058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:49.870874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:49.986058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:49.986171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:49.993591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:50.041028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:50.076269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:50.345832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:50.345914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:50.345921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:50.346053Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:50.610798Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:51.002997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:51.051175Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:51.201907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:51.334357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:51.393753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.007724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678113032266515:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:52.007840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:52.008117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678113032266524:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:52.008164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:52.733748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.764840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.788131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.814301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.841194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.867545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.893497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:52.927690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:53.010501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678117327234694:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:53.010558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:53.010582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678117327234699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:53.010705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678117327234701:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:53.010742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:53.016712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:53.024585Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678117327234702:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:48:53.104681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678117327234754:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:54.605363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678100147362791:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:54.605425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:54.692105Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=YThiOGFiNy0zNzg4YWJjYy1mNzhjMTM4OS0zOGNkZDg3Yg==, ActorId: [1:7592678108737299216:2396], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-07T17:48:54.693500Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=YThiOGFiNy0zNzg4YWJjYy1mNzhjMTM4OS0zOGNkZDg3Yg==, ActorId: [1:7592678108737299216:2396], ActorState: ReadyState, Cleanup start is_final ... DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592678130212137266:2754], owner: [1:7592678108737299187:2384], status: SUCCESS, issues: , uid: ceeea47c-27beeab9-b55c6508-d981b0ba 2026-01-07T17:48:57.581583Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592678125917169734:2624], status: SUCCESS, compileActor: [1:7592678130212137266:2754] 2026-01-07T17:48:57.581648Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592678125917169734:2624], queryUid: ceeea47c-27beeab9-b55c6508-d981b0ba, status:SUCCESS 2026-01-07T17:48:57.584399Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592678130212137326:2758]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQJzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808137","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=1050699176 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=1050699176 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=1050699176 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=1050699176 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=1050699176 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=1050699176 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (-268425865ul, \\\"New\\\");\\n 
","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 18446744073441125751,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699176)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699177)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699178)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699179)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699180)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1050699181)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node 
Type\":\"ResultSet_0_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"lookup_by\":[\"Key (1050699176)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (1050699177)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (1050699178)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (1050699179)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (1050699180)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (1050699181)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"ReadRange\":[\"Key (1050699176)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"ReadRange\":[\"Key (1050699177)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"ReadRange\":[\"Key (1050699178)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"ReadRange\":[\"Key (1050699179)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"ReadRange\":[\"Key (1050699180)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"ReadRange\":[\"Key (1050699181)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"c94f2413-35d6de82-3e5a158c-d841d2ef","version":"1.0"} 2026-01-07T17:48:57.585397Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: 
[1:7592678130212137326:2758], duration: 0.564843s 2026-01-07T17:48:57.585419Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592678130212137326:2758], owner: [1:7592678108737299187:2384], status: SUCCESS, issues: , uid: c94f2413-35d6de82-3e5a158c-d841d2ef 2026-01-07T17:48:57.585482Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592678125917169738:2628], status: SUCCESS, compileActor: [1:7592678130212137326:2758] 2026-01-07T17:48:57.585532Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592678125917169738:2628], queryUid: c94f2413-35d6de82-3e5a158c-d841d2ef, status:SUCCESS still compiling... 0 still active sessions ... 0 received non-success status for session 12 received non-success status for session 13 received non-success status for session 14 received non-success status for session 15 received non-success status for session 10 received non-success status for session 11 received non-success status for session 1 received non-success status for session 17 received non-success status for session 18 received non-success status for session 4 received non-success status for session 22 received non-success status for session 21 received non-success status for session 2 received non-success status for session 5 received non-success status for session 8 received non-success status for session 9 received non-success status for session 31 received non-success status for session 0 received non-success status for session 29 received non-success status for session 35 received non-success status for session 19 received non-success status for session 37 received non-success status for session 38 received non-success status for session 23 received non-success status for session 24 received non-success status for session 7 received non-success status for session 27 received non-success status for session 28 received non-success status for session 46 received non-success status for session 30 received non-success status for session 16 received non-success status for session 33 received non-success status for session 3 received non-success status for session 39 received non-success status for session 40 received non-success status for session 6 received non-success status for session 32 received non-success status for session 47 received non-success status for session 41 received non-success status for session 42 received non-success status for session 43 received non-success status for session 26 received non-success status for session 25 received non-success status for session 44 received non-success status for session 20 received non-success status for session 36 received non-success status for session 45 received non-success status for session 49 received non-success status for session 48 received non-success status for session 34 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> TCmsTest::SysTabletsNode [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> 
TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableMaintenance [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCdcStreamTests::CreateStream [GOOD] >> TCmsTest::TestProcessingQueue [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCdcStreamTests::AlterStream |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableMaintenance [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2026-01-07T17:48:59.921166Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-07T17:49:00.051043Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-07T17:49:00.066555Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-07T17:49:00.164670Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-07T17:49:06.574356Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 26:26 2026-01-07T17:49:06.574424Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 27:27 2026-01-07T17:49:06.574448Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 28:28 2026-01-07T17:49:06.574469Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 29:29 2026-01-07T17:49:06.574487Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 30:30 2026-01-07T17:49:06.574502Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 31:31 2026-01-07T17:49:06.574519Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 32:32 2026-01-07T17:49:06.574532Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 25:25 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-07T17:48:36.231962Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678047440285011:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.232040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.505328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.578699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.578778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.592481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:36.595736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.694157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.861895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.861910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.861989Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.158572Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.163288Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.163346Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.164298Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:61121, port: 61121 2026-01-07T17:48:37.168879Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.175819Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:48:37.225134Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****eBXg (B0921113) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.242896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:38.818826Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678052901513012:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:38.818870Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:38.832200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:38.895228Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:38.896168Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678052901512985:2081] 1767808118818031 != 1767808118818034 2026-01-07T17:48:38.939252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:38.939365Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:38.941235Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:38.986492Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:38.986557Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:38.986563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:38.986648Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:39.044328Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.046669Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.046692Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.047257Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:8610, port: 8610 2026-01-07T17:48:39.047374Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.052881Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8610. Invalid credentials 2026-01-07T17:48:39.053117Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ao-w (A544A200) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8610. 
Invalid credentials)' 2026-01-07T17:48:39.116111Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.143390Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678066542467590:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.143442Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:41.153923Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:41.203480Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:41.204516Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678066542467563:2081] 1767808121142644 != 1767808121142647 2026-01-07T17:48:41.209625Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:41.209708Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:41.228820Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:41.257413Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:41.257436Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:41.257442Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:41.257517Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:41.322193Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.326100Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:41.328390Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:41.328413Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:41.328813Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10219, port: 10219 2026-01-07T17:48:41.328880Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:41.331489Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10219. 
Invalid credentials 2026-01-07T17:48:41.331652Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****UaVw (1C22854D) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:10219. Invalid credentials)' 2026-01-07T17:48:43.384761Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678077608506600:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:43.384816Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.394208Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:43.446174Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.494292Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:43.494357Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.495756Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.502546Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.502570Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.502577Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17 ... 
PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678086202209643:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:45.866367Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:45.889845Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:45.924060Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:45.925390Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592678086202209615:2081] 1767808125865761 != 1767808125865764 2026-01-07T17:48:45.973160Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:45.973239Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:45.974491Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:45.979864Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:45.979886Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:45.979893Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:45.979962Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:46.059376Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:46.062465Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:46.062485Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:46.062925Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:23902, port: 23902 2026-01-07T17:48:46.062978Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:46.066681Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:46.109329Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:46.109741Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:46.109794Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.153366Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.197287Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.197983Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****5nMw (B331BFF7) () has now valid token of ldapuser@ldap 2026-01-07T17:48:46.199368Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:46.870558Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:50.866712Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592678086202209643:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:50.866797Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:50.869313Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****5nMw (B331BFF7) 2026-01-07T17:48:50.869438Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:23902, port: 23902 2026-01-07T17:48:50.869543Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.873501Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:50.917210Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:50.917653Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:50.917696Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:50.961221Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:51.005220Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:51.006016Z node 5 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****5nMw (B331BFF7) () has now valid token of ldapuser@ldap 2026-01-07T17:48:53.870556Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****5nMw (B331BFF7) 2026-01-07T17:48:56.686309Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678133970054790:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:56.686410Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:56.698162Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:56.771073Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:56.772158Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678133970054763:2081] 1767808136685339 != 1767808136685342 2026-01-07T17:48:56.810101Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:56.810187Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:56.817099Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:56.846567Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:56.846587Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:56.846613Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:56.846685Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:56.877535Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:56.906009Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:56.908696Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:56.908719Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:56.909321Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14571, port: 14571 2026-01-07T17:48:56.909389Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:56.912328Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:56.953362Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****aR2w (9B8EAB82) () has now valid token of 
ldapuser@ldap 2026-01-07T17:48:57.691669Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:00.688975Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****aR2w (9B8EAB82) 2026-01-07T17:49:00.689109Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14571, port: 14571 2026-01-07T17:49:00.689217Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:49:00.694978Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:49:00.737392Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****aR2w (9B8EAB82) () has now valid token of ldapuser@ldap 2026-01-07T17:49:01.686794Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7592678133970054790:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:01.686877Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:05.691090Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****aR2w (9B8EAB82) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TSubDomainTest::ConsistentCopyTable |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks >> TGRpcConsoleTest::SimpleConfigTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2026-01-07T17:49:08.095922Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-07T17:49:08.096316Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-07T17:49:08.096865Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-07T17:49:08.098446Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.098863Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-07T17:49:08.108314Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.108400Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.108457Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.108526Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-07T17:49:08.108619Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.108680Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-07T17:49:08.108815Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-07T17:49:08.109908Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-07T17:49:08.110380Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-07T17:49:08.110710Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-07T17:49:08.110810Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-07T17:49:08.111153Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-07T17:49:08.111362Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-07T17:49:08.111570Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.111730Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.111811Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2026-01-07T17:49:08.111958Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0} 2026-01-07T17:49:08.112030Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.112148Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.112203Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-07T17:49:08.112342Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.112405Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-07T17:49:08.112508Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.112620Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.112692Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-07T17:49:08.112855Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-07T17:49:08.112886Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-07T17:49:08.113025Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113087Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113172Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-07T17:49:08.113191Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-07T17:49:08.113284Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113359Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-07T17:49:08.113387Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-07T17:49:08.113466Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113528Z node 1 
:TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-07T17:49:08.113542Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-07T17:49:08.113654Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113736Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-07T17:49:08.113776Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-07T17:49:08.113823Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113871Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.113922Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-07T17:49:08.113944Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-07T17:49:08.114037Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-07T17:49:08.114060Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-07T17:49:08.114138Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.114249Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.114291Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-07T17:49:08.114334Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-07T17:49:08.114460Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.114529Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.114567Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-07T17:49:08.114590Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-07T17:49:08.114714Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.114789Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-07T17:49:08.114814Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-07T17:49:08.121248Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 720575 ... ender# [1:403:2437] TEvAllocateResult from# 8500000 to# 8600000 2026-01-07T17:49:08.173189Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.173262Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2026-01-07T17:49:08.173287Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8600000 to# 8700000 2026-01-07T17:49:08.173380Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.173444Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2026-01-07T17:49:08.173468Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8700000 to# 8800000 2026-01-07T17:49:08.173561Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2026-01-07T17:49:08.173615Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8800000 to# 8900000 2026-01-07T17:49:08.173667Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2026-01-07T17:49:08.173681Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:411:2445] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-07T17:49:08.177391Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2026-01-07T17:49:08.177886Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2026-01-07T17:49:08.178142Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# 
[1:437:2471] requested range size#100000 2026-01-07T17:49:08.178349Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.178487Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2026-01-07T17:49:08.178611Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.178741Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.178862Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2026-01-07T17:49:08.178946Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.179050Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.179168Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.179321Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2026-01-07T17:49:08.179433Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.179558Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2026-01-07T17:49:08.179705Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.179938Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2026-01-07T17:49:08.180029Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.180109Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.180179Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2026-01-07T17:49:08.180435Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2026-01-07T17:49:08.180460Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:433:2467] 
TEvAllocateResult from# 9000000 to# 9100000 2026-01-07T17:49:08.180518Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.180652Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:451:2485] requested range size#100000 2026-01-07T17:49:08.180815Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.180994Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2026-01-07T17:49:08.181026Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9100000 to# 9200000 2026-01-07T17:49:08.181082Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.181229Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2026-01-07T17:49:08.181268Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9200000 to# 9300000 2026-01-07T17:49:08.181326Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.181457Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2026-01-07T17:49:08.181476Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9300000 to# 9400000 2026-01-07T17:49:08.181507Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.181612Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2026-01-07T17:49:08.181631Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9400000 to# 9500000 2026-01-07T17:49:08.181669Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.181747Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2026-01-07T17:49:08.181763Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9500000 to# 9600000 2026-01-07T17:49:08.181831Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-07T17:49:08.181859Z 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9600000 to# 9700000 2026-01-07T17:49:08.181894Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.181942Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2026-01-07T17:49:08.181959Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9700000 to# 9800000 2026-01-07T17:49:08.181995Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.182021Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.182090Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-07T17:49:08.182107Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9800000 to# 9900000 2026-01-07T17:49:08.182138Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-07T17:49:08.182208Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-07T17:49:08.182224Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:451:2485] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> 
TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |93.9%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-07T17:48:36.231807Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678046521215892:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.231882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.509957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.566778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.566870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.582429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.639905Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:36.728771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.861902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.861913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.862003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.200272Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.204452Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.204498Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.205970Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:22168, port: 22168 
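The ldap_auth_provider.cpp entries in these test dumps follow one sequence: bind as the service account (bindDn cn=robouser,dc=search,dc=yandex,dc=net), run a subtree search for the user (filter uid=ldapuser) requesting the membership attribute, and then resolve nested groups either with a single matching-rule-in-chain query (the (member:1.2.840.113556.1.4.1941:=...) filter) or with a level-by-level tree traversal that OR-joins the group DNs found on the previous level into an (|(entryDn=...)(entryDn=...)) filter. The sketch below only illustrates how filter strings of that shape could be assembled; the helper names and the standalone program are assumptions for illustration, not YDB's actual implementation:

    // Illustrative sketch (assumption), not ldap_auth_provider.cpp itself:
    // builds the two kinds of group-resolution filters seen in the log.
    #include <iostream>
    #include <string>
    #include <vector>

    // One-shot nested-group lookup via the matching-rule-in-chain OID,
    // as in "(member:1.2.840.113556.1.4.1941:=uid=ldapuser,...)".
    std::string MatchingRuleInChainFilter(const std::string& userDn) {
        return "(member:1.2.840.113556.1.4.1941:=" + userDn + ")";
    }

    // Fallback tree traversal: OR-join the DNs of the groups found so far,
    // as in "(|(entryDn=...)(entryDn=...))".
    std::string EntryDnDisjunction(const std::vector<std::string>& groupDns) {
        std::string filter = "(|";
        for (const auto& dn : groupDns) {
            filter += "(entryDn=" + dn + ")";
        }
        return filter + ")";
    }

    int main() {
        std::cout << MatchingRuleInChainFilter(
            "uid=ldapuser,dc=search,dc=yandex,dc=net") << "\n";
        std::cout << EntryDnDisjunction(
            {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
             "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}) << "\n";
    }

Running this prints filters of the same shape as the search lines logged above and below; each traversal round in the log simply repeats the search with the disjunction rebuilt from the groups discovered in the previous round.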
2026-01-07T17:48:37.206058Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.261428Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:48:37.306637Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****nZBg (D6D3A8D5) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.306933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:38.917297Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678056329436082:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:38.917401Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:38.933832Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:38.997116Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:38.998018Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678056329436058:2081] 1767808118916725 != 1767808118916728 2026-01-07T17:48:39.040154Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:39.040245Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:39.044574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:39.084400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:39.084428Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:39.084434Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:39.084530Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:39.167204Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:39.169543Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:39.169569Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:39.170030Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:20518, port: 20518 2026-01-07T17:48:39.170116Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:39.217354Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net 
on server ldaps://localhost:20518. Invalid credentials 2026-01-07T17:48:39.217905Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****pAkw (AA4F1024) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:20518. Invalid credentials)' 2026-01-07T17:48:39.219224Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:41.329285Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678066653495852:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.329344Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:41.339391Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:41.392052Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678066653495827:2081] 1767808121328895 != 1767808121328898 2026-01-07T17:48:41.396372Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:41.438058Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:41.438145Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:41.451918Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:41.460901Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:41.460915Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:41.460937Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:41.460987Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:41.569957Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:41.572319Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:41.572338Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:41.572856Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:27685, port: 27685 2026-01-07T17:48:41.572911Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:41.621418Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:27685. 
Invalid credentials 2026-01-07T17:48:41.621879Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****9RNA (6171CC4C) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:27685. Invalid credentials)' 2026-01-07T17:48:41.622964Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.841779Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678076372509288:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:43.841844Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.855530Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:43.927638Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.986476Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:43.986556Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.989953Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:44.023208Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:44.023231Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:44.023239Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) ... 
IDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678087393341923:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:46.473127Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:46.484062Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:46.538246Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:46.539678Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592678087393341896:2081] 1767808126472216 != 1767808126472219 2026-01-07T17:48:46.544505Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:46.544581Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:46.565770Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:46.599216Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:46.599239Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:46.599247Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:46.599323Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:46.694754Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:46.697868Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:46.697905Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:46.698641Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:10574, port: 10574 2026-01-07T17:48:46.698748Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:46.749465Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:46.793258Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:46.793797Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:46.793861Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.837410Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.881331Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:46.882834Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****1VlQ (2197A9D4) () has now valid token of ldapuser@ldap 2026-01-07T17:48:46.883902Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:47.478451Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:50.475680Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****1VlQ (2197A9D4) 2026-01-07T17:48:50.475830Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:10574, port: 10574 2026-01-07T17:48:50.475922Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:50.537487Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:50.585240Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:50.585692Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:50.585723Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:50.633254Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:50.677242Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:50.678375Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****1VlQ (2197A9D4) () has now valid token of ldapuser@ldap 2026-01-07T17:48:51.473569Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592678087393341923:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:51.473717Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:54.477421Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****1VlQ (2197A9D4) 2026-01-07T17:48:57.608958Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678135593415257:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:57.609042Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:57.622922Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:57.677362Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:57.679749Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678135593415230:2081] 1767808137608006 != 1767808137608009 2026-01-07T17:48:57.705012Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:57.705071Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:57.706954Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:57.734730Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:57.734757Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:57.734768Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:57.734838Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:57.857348Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:58.032014Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:58.034628Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:58.034660Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:58.035381Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:7486, port: 7486 2026-01-07T17:48:58.035495Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:58.089417Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:58.137763Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****aRdA (A801C738) () has now valid 
token of ldapuser@ldap 2026-01-07T17:48:58.616984Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:02.609062Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7592678135593415257:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:02.609184Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:03.614610Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****aRdA (A801C738) 2026-01-07T17:49:03.614728Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:7486, port: 7486 2026-01-07T17:49:03.614803Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:49:03.669468Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:49:03.717788Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****aRdA (A801C738) () has now valid token of ldapuser@ldap 2026-01-07T17:49:07.616417Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****aRdA (A801C738) >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |93.9%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2026-01-07T17:48:55.482595Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678128745775703:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:55.483300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:55.512460Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678126010447041:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:55.512874Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:55.521474Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678126858927157:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:55.521702Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:55.681317Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:55.682034Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:55.700837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:55.754570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:55.754846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:55.755959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:55.756012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:55.759366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:55.759438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:55.762683Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:48:55.762844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:55.765573Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:55.768362Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:48:55.769348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:55.832451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:55.888392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:55.922109Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:55.945012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:56.101376Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592678128745775908:2145] Handle TEvNavigate describe path dc-1 2026-01-07T17:48:56.101437Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592678133040743701:2483] HANDLE EvNavigateScheme dc-1 2026-01-07T17:48:56.101599Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678128745775915:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:56.101728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592678128745776136:2296][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592678128745775915:2148], cookie# 1 2026-01-07T17:48:56.103116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678128745776147:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776144:2296], cookie# 1 2026-01-07T17:48:56.103150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678128745776148:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776145:2296], cookie# 1 2026-01-07T17:48:56.103163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678128745776149:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776146:2296], cookie# 1 2026-01-07T17:48:56.103188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678128745775554:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776147:2296], cookie# 1 2026-01-07T17:48:56.103216Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678128745775557:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776148:2296], cookie# 1 2026-01-07T17:48:56.103229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:7592678128745775560:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678128745776149:2296], cookie# 1 2026-01-07T17:48:56.103276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678128745776147:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745775554:2053], cookie# 1 2026-01-07T17:48:56.103289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678128745776148:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745775557:2056], cookie# 1 2026-01-07T17:48:56.103298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678128745776149:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745775560:2059], cookie# 1 2026-01-07T17:48:56.103338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678128745776136:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745776144:2296], cookie# 1 2026-01-07T17:48:56.103359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592678128745776136:2296][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:48:56.103373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678128745776136:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745776145:2296], cookie# 1 2026-01-07T17:48:56.103393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592678128745776136:2296][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:48:56.103414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678128745776136:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678128745776146:2296], cookie# 1 2026-01-07T17:48:56.103424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592678128745776136:2296][/dc-1] Sync cookie mismatch: sender# [1:7592678128745776146:2296], cookie# 1, current cookie# 0 2026-01-07T17:48:56.103456Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592678128745775915:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:48:56.115480Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592678128745775915:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592678128745776136:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:48:56.115614Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592678128745775915:2148], cacheItem# { Subscriber: { Subscriber: [1:7592678128745776136:2296] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:48:56.118745Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592678133040743702:2484], recipient# [1:7592678133040743701:2483], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:48:56.118824Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592678133040743701:2483] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:56.159815Z node 1 :TX_PROXY DEBUG: describe.cpp:433: ... em: self# [7:7592678166819086889:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7592678179703989765:2863] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:49:07.904986Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7592678166819086889:2121], cacheItem# { Subscriber: { Subscriber: [7:7592678179703989765:2863] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:07.905050Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7592678166819086889:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:49:07.905097Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7592678166819086889:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7592678179703989766:2864] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 
2026-01-07T17:49:07.905138Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7592678166819086571:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [7:7592678179703989784:2865] 2026-01-07T17:49:07.905156Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7592678166819086571:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:49:07.905176Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7592678166819086571:2050] Subscribe: subscriber# [7:7592678179703989784:2865], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:49:07.905187Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7592678166819086889:2121], cacheItem# { Subscriber: { Subscriber: [7:7592678179703989766:2864] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:07.905217Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7592678166819086574:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [7:7592678179703989785:2865] 2026-01-07T17:49:07.905223Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7592678166819086574:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:49:07.905236Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7592678166819086574:2053] Subscribe: subscriber# [7:7592678179703989785:2865], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:49:07.905264Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7592678166819086577:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [7:7592678179703989786:2865] 2026-01-07T17:49:07.905283Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7592678166819086577:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-07T17:49:07.905330Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7592678166819086577:2056] Subscribe: subscriber# [7:7592678179703989786:2865], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-07T17:49:07.905331Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592678179703989787:2866], recipient# [7:7592678179703989761:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:49:07.905366Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7592678179703989784:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7592678166819086571:2050] 2026-01-07T17:49:07.905402Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7592678166819086571:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7592678179703989784:2865] 2026-01-07T17:49:07.905405Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7592678179703989785:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7592678166819086574:2053] 2026-01-07T17:49:07.905424Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7592678179703989786:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7592678166819086577:2056] 2026-01-07T17:49:07.905439Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7592678166819086574:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7592678179703989785:2865] 2026-01-07T17:49:07.905458Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7592678166819086577:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7592678179703989786:2865] 2026-01-07T17:49:07.905485Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7592678179703989780:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7592678179703989781:2865] 2026-01-07T17:49:07.905549Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7592678179703989780:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7592678179703989782:2865] 2026-01-07T17:49:07.905613Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][7:7592678179703989780:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [7:7592678166819086889:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:07.905645Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7592678179703989780:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 
Version: 0 }: sender# [7:7592678179703989783:2865] 2026-01-07T17:49:07.905666Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][7:7592678179703989780:2865][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [7:7592678166819086889:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:07.905697Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7592678166819086889:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:49:07.905743Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7592678166819086889:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7592678179703989780:2865] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:49:07.905823Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7592678166819086889:2121], cacheItem# { Subscriber: { Subscriber: [7:7592678179703989780:2865] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:07.905884Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7592678179703989788:2867], recipient# [7:7592678179703989767:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-07T17:48:36.231801Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678044850053478:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:36.231884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:36.517782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:36.545971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:36.546069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:36.597245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:36.599160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:36.707231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:36.861986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:36.862012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:36.862026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:36.862100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:37.197066Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:37.200856Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:37.200912Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:37.202239Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:24769, port: 24769 2026-01-07T17:48:37.202322Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:37.257538Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:37.305318Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:37.305934Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:48:37.305993Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.349513Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.397355Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-07T17:48:37.399514Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****oERA (449AE6BC) () has now valid token of ldapuser@ldap 2026-01-07T17:48:37.399759Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:41.232000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678044850053478:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:41.232067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:48:41.400444Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****oERA (449AE6BC) 2026-01-07T17:48:41.400586Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:24769, port: 24769 2026-01-07T17:48:41.400727Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:41.461504Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:41.461959Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:24769 return no entries 2026-01-07T17:48:41.462427Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****oERA (449AE6BC) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:24769 return no entries)' 2026-01-07T17:48:45.402267Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****oERA (449AE6BC) 2026-01-07T17:48:47.777394Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678091849061807:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:47.777477Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:47.788503Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:47.852102Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:47.853646Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678091849061780:2081] 1767808127776418 != 1767808127776421 2026-01-07T17:48:47.897785Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:47.897893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:47.903874Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:47.930638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:47.930662Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:47.930669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:47.930740Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:48.015657Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:48:48.018970Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:48.019002Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:48.019699Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:64765, port: 64765 2026-01-07T17:48:48.019787Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:48.077403Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:48.077929Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:64765. 
Server is busy 2026-01-07T17:48:48.078457Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Q5TA (DEAB7A40) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:64765. Server is busy)' 2026-01-07T17:48:48.078755Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:48.078793Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:48.079616Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:48.079663Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:64765, port: 64765 2026-01-07T17:48:48.079712Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:48:48.133399Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:48:48.133897Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:64765. Server is busy 2026-01-07T17:48:48.134350Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Q5TA (DEAB7A40) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:64765. Server is busy)' 2026-01-07T17:48:48.781911Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:49.779310Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Q5TA (DEAB7A40) 2026-01-07T17:48:49.779636Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:48:49.779662Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:48:49.780721Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:64765, port: 64765 2026-01-07T17:48:49.780791Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind ... 
ovider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:48:59.121995Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****e-7g (3D54C316) () has now valid token of ldapuser@ldap 2026-01-07T17:49:00.991621Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678149202949925:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:00.991690Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:01.004557Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:01.061157Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:01.062657Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678149202949898:2081] 1767808140991054 != 1767808140991057 2026-01-07T17:49:01.107803Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:01.107896Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:01.119408Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:01.141157Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:01.141175Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:01.141183Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:01.141243Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:01.200786Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:01.346041Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:49:01.349807Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:49:01.349828Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:49:01.350318Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:19569, port: 19569 2026-01-07T17:49:01.350396Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:49:01.358697Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:49:01.401401Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, 
filter: uid=ldapuser, attributes: memberOf 2026-01-07T17:49:01.445853Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****bmCg (5E09E6FC) () has now valid token of ldapuser@ldap 2026-01-07T17:49:03.900192Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678160467232079:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:03.900248Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:03.911109Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:03.967699Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:04.014253Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:04.014331Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:04.020108Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:04.054541Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:04.054576Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:04.054584Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:04.054660Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:04.114434Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-07T17:49:04.118111Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:49:04.118143Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:49:04.118818Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17107, port: 17107 2026-01-07T17:49:04.118908Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:49:04.127910Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:49:04.169403Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-07T17:49:04.213284Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-07T17:49:04.213784Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-07T17:49:04.213862Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:49:04.257266Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:49:04.301340Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-07T17:49:04.302661Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****v5Dw (675F8447) () has now valid token of ldapuser@ldap 2026-01-07T17:49:04.303786Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:06.909021Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7592678173253018756:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:06.909134Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:06.919704Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:06.975420Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:06.977752Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7592678173253018729:2081] 1767808146908209 != 1767808146908212 2026-01-07T17:49:07.017015Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:07.017082Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:07.018939Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:07.046832Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:07.046854Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:07.046863Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:07.046944Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:07.211958Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:07.264356Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 
2026-01-07T17:49:07.266895Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-07T17:49:07.266915Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-07T17:49:07.267398Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:65078, port: 65078 2026-01-07T17:49:07.267456Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-07T17:49:07.275118Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-07T17:49:07.317410Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-07T17:49:07.317468Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:65078. Bad search filter 2026-01-07T17:49:07.317884Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Sw0g (B3E71519) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:65078. Bad search filter)' |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable |93.9%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionIssuePartialPermissions |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2026-01-07T17:48:22.225506Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:48:22.225637Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:48:22.248390Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:48:22.248492Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:48:22.277756Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:48:22.278155Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=4104375366489263175, session=0, seqNo=0) 2026-01-07T17:48:22.278851Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:48:22.291081Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=4104375366489263175, session=1) 2026-01-07T17:48:22.291767Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:135:2159], cookie=12497694304381271486 2026-01-07T17:48:22.292186Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:148:2170], cookie=17360741398960903171) 2026-01-07T17:48:22.292257Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:148:2170], cookie=17360741398960903171) 2026-01-07T17:48:22.708293Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:22.720443Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.079427Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.091063Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.431064Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.443080Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:23.792831Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:23.805059Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.175330Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.187393Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.536562Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.548603Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:24.887902Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:24.900030Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.250349Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.262325Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:25.612982Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:25.625057Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.027535Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.039657Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.400007Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.411712Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:26.771659Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:26.783693Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.143729Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.155534Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.516609Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.529001Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:27.932021Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:27.944311Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.304153Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
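
Editorial aside on the Kesus test output above: the trace repeats an attach → periodic TTxSelfCheck → TTxSessionTimeout cycle, in which the session created by TTxSessionAttach is eventually reported as "Deleting session 1" once it is no longer refreshed. The sketch below is a minimal, self-contained illustration of that attach / self-check / expiry pattern only; every name in it (TToySessionRegistry, Attach, SelfCheck) is invented for illustration, and the logic is an assumption about the pattern visible in the log, not the actual NKikimr Kesus implementation.

// Illustrative sketch only: a toy session registry with the same
// attach / periodic self-check / timeout-expiry shape seen in the
// Kesus tablet log above. Names are hypothetical and do not
// correspond to the real NKikimr::NKesus code.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <map>
#include <thread>

using Clock = std::chrono::steady_clock;

class TToySessionRegistry {
public:
    explicit TToySessionRegistry(std::chrono::milliseconds timeout)
        : Timeout_(timeout) {}

    // Roughly analogous to TTxSessionAttach: session=0 means "create a new session".
    uint64_t Attach(uint64_t sessionId) {
        if (sessionId == 0) {
            sessionId = NextSessionId_++;   // "Created new session <id>"
        }
        LastSeen_[sessionId] = Clock::now();
        return sessionId;
    }

    // Roughly analogous to the periodic TTxSelfCheck: expires sessions that
    // have not been attached/refreshed within the timeout.
    void SelfCheck() {
        const auto now = Clock::now();
        for (auto it = LastSeen_.begin(); it != LastSeen_.end();) {
            if (now - it->second > Timeout_) {
                std::cout << "Deleting session " << it->first << "\n"; // like TTxSessionTimeout
                it = LastSeen_.erase(it);
            } else {
                ++it;
            }
        }
    }

    size_t ActiveSessions() const { return LastSeen_.size(); }

private:
    std::chrono::milliseconds Timeout_;
    uint64_t NextSessionId_ = 1;
    std::map<uint64_t, Clock::time_point> LastSeen_;
};

int main() {
    TToySessionRegistry registry(std::chrono::milliseconds(50));
    const uint64_t session = registry.Attach(0);   // attach with session=0 -> new session 1
    std::cout << "Attached session " << session << "\n";

    // Stand-in for the periodic self-check loop: after the timeout elapses
    // without a refresh, the session is expired on the next check.
    std::this_thread::sleep_for(std::chrono::milliseconds(80));
    registry.SelfCheck();
    std::cout << "Active sessions: " << registry.ActiveSessions() << "\n";
    return 0;
}

In the real trace the checks are driven by tablet local transactions (Execute/Complete pairs) rather than a sleep, but the observable outcome is the same as in the log that follows: "Deleting session 1" once the session timeout fires.
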
2026-01-07T17:48:28.316086Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:28.676375Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:28.688231Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.048996Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.060995Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.422768Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.435101Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:29.827727Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:29.840057Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.201555Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.213685Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.574307Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.586352Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:30.947829Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:30.960175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.321823Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.333708Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:31.694656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:31.706817Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.079627Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.091603Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.452421Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.464248Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:32.825627Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:32.837916Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.200094Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.212330Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.606437Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:33.618505Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:33.994199Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.006148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.368356Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.380738Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:34.741480Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:34.753323Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:35.113931Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:35.126120Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:35.496821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:35.508895Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:35.880811Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:35.892717Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:36.253955Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:36.265989Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:36.624663Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:36.636807Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.000148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:37.012483Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.386382Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:37.399044Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:37.760871Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:37.773058Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:38.144139Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:38.156081Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:38.516462Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:38.528569Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... 
: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:58.771196Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.132020Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.144140Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.504991Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.516770Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:48:59.878162Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:48:59.889901Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.250822Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.262797Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:00.646634Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:00.658936Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.020674Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:01.032739Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.394908Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:01.406844Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:01.768046Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:01.779784Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:02.141043Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:02.152867Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:02.534992Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:02.547016Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:02.918942Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:02.930973Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:03.292463Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:03.304438Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:03.665883Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:03.678035Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:04.038805Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:04.050955Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-07T17:49:04.416871Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:04.429393Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:04.792779Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:04.804948Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:05.155340Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:05.167409Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:05.518328Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:05.530722Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:05.883905Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:05.895643Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:06.247643Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:06.259767Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:06.621970Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:06.634042Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:06.975135Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:06.987150Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:07.327082Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:07.338988Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:07.689349Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:07.701244Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:08.177500Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:08.189674Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:08.551569Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:08.563514Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:08.928477Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:08.941060Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:09.313273Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:09.325859Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:09.700127Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:09.713109Z node 2 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:10.086901Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:10.099344Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:10.461381Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:10.473443Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:10.846573Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:10.858958Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:11.224772Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:11.237400Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:11.596380Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:49:11.608426Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:49:11.934450Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-07T17:49:11.934525Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:49:11.946707Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-07T17:49:11.957681Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:644:2570], cookie=16275589287150257361) 2026-01-07T17:49:11.957802Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:644:2570], cookie=16275589287150257361) 2026-01-07T17:49:12.314859Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:12.314971Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:12.338152Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:12.338273Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:12.374906Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:12.375679Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-07T17:49:12.375838Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:49:12.391514Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=12345, session=1) 2026-01-07T17:49:12.392297Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-07T17:49:12.404704Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:144:2166], cookie=23456, session=1) 2026-01-07T17:49:12.725507Z node 4 :KESUS_TABLET INFO: 
tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:49:12.725625Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:49:12.743955Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:49:12.744256Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:49:12.769661Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:49:12.770509Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-01-07T17:49:12.770653Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:49:12.785626Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-01-07T17:49:12.786326Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-07T17:49:12.799047Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=23456, session=1) |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> 
KqpKnn::VectorSearchKnnPushdown+Nullable |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> TSentinelTests::PDiskPileGuardFullPile |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TSubDomainTest::ConsistentCopyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 1578, MsgBus: 4128 2026-01-07T17:48:53.026226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678117200864491:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:53.026518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:53.183962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:53.184048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:53.213249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:53.226140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:53.228022Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678117200864384:2081] 1767808133023714 != 1767808133023717 2026-01-07T17:48:53.235631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:53.289214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:53.289245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:53.289257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:53.289348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:53.441948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:53.636860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:53.691613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:53.820417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:53.922188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:53.968115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:54.085580Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:55.615609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678125790800843:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:55.615724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:55.615999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678125790800853:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:55.616082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:55.956046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:55.991782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.025401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.060281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.095474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.155776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.190201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.229025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:56.300050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678130085769020:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:56.300132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:56.300350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678130085769025:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:56.300388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678130085769026:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:56.300498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:56.304009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:56.314636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678130085769029:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:48:56.407107Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678130085769080:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:57.628998Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YWU3MTU0N2EtMmJjY2QxNmQtODZlZDBjMDEtNjE0MjdmN2E=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YWU3MTU0N2EtMmJjY2QxNmQtODZlZDBjMDEtNjE0MjdmN2E= temp_dir_name# bb6b9613-4b3b-c601-10c6-1fbb54fb63ac trace_id# 2026-01-07T17:48:57.629080Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YWU3MTU0N2EtMmJjY2QxNmQtODZlZDBjMDEtNjE0MjdmN2E=, ActorId: [1:7592678134380736669:2530], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-07T17:48:57.641893Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id ... : 0}. Execution is complete results_size# 1 trace_id# 2026-01-07T17:49:11.220412Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-07T17:49:11.220468Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-01-07T17:49:11.220481Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-07T17:49:11.220491Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# trace_id# 2026-01-07T17:49:11.220507Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-07T17:49:11.221144Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Execution is complete results_size# 1 trace_id# 2026-01-07T17:49:11.390952Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592678190215313015:3106]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQJzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808151","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=165021440 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=165021440 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=165021440 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=165021440 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=165021440 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=165021440 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (-1426160259ul, \\\"New\\\");\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 18446744072283391357,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (165021440)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (165021441)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (165021442)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key 
(165021443)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (165021444)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (165021445)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_0_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"lookup_by\":[\"Key (165021440)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (165021441)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (165021442)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (165021443)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (165021444)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"},{\"lookup_by\":[\"Key (165021445)\"],\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Lookup\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"ReadRange\":[\"Key (165021440)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"ReadRange\":[\"Key (165021441)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"ReadRange\":[\"Key (165021442)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"ReadRange\":[\"Key 
(165021443)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"ReadRange\":[\"Key (165021444)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"ReadRange\":[\"Key (165021445)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"EightShard\",\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet_0_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"20a4ea46-85760646-87a7246a-9a17444d","version":"1.0"} 2026-01-07T17:49:11.392622Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592678190215313015:3106], duration: 1.181826s 2026-01-07T17:49:11.392643Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592678190215313015:3106], owner: [1:7592678125790800805:2384], status: SUCCESS, issues: , uid: 20a4ea46-85760646-87a7246a-9a17444d 2026-01-07T17:49:11.392744Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592678134380736799:2659], status: SUCCESS, compileActor: [1:7592678190215313015:3106] 2026-01-07T17:49:11.392868Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=165021440 + 0;\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=165021440 + 1;\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=165021440 + 2;\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=165021440 + 3;\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=165021440 + 4;\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=165021440 + 5;\n\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\n (-1426160259ul, \"New\");\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:49:11.393048Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592678134380736799:2659], queryUid: 20a4ea46-85760646-87a7246a-9a17444d, status:SUCCESS still compiling... 0 still active sessions ... 0 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> TPQTest::TestManyConsumers [GOOD] |94.0%| [TA] $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::DisableMaintenance |94.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2026-01-07T17:48:53.628150Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678120649136098:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:53.629031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:53.791630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:53.826949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:53.827064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:53.835379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:53.878191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:54.070504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:54.090605Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592678120649136302:2146] Handle TEvNavigate describe path dc-1 2026-01-07T17:48:54.090663Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592678124944104074:2466] HANDLE EvNavigateScheme dc-1 2026-01-07T17:48:54.090760Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678120649136309:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:54.090842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592678120649136542:2305][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592678120649136309:2149], cookie# 1 2026-01-07T17:48:54.092003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678120649136558:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136555:2305], cookie# 1 2026-01-07T17:48:54.092050Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:375: [replica][1:7592678120649136559:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136556:2305], cookie# 1 2026-01-07T17:48:54.092068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678120649136560:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136557:2305], cookie# 1 2026-01-07T17:48:54.092092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678120649135950:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136559:2305], cookie# 1 2026-01-07T17:48:54.092100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678120649135947:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136558:2305], cookie# 1 2026-01-07T17:48:54.092111Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678120649135953:2060] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678120649136560:2305], cookie# 1 2026-01-07T17:48:54.092154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678120649136559:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649135950:2057], cookie# 1 2026-01-07T17:48:54.092203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678120649136558:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649135947:2054], cookie# 1 2026-01-07T17:48:54.092220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678120649136560:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649135953:2060], cookie# 1 2026-01-07T17:48:54.092287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678120649136542:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649136556:2305], cookie# 1 2026-01-07T17:48:54.092320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592678120649136542:2305][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:48:54.092335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678120649136542:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649136555:2305], cookie# 1 2026-01-07T17:48:54.092356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592678120649136542:2305][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:48:54.092390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678120649136542:2305][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678120649136557:2305], cookie# 1 2026-01-07T17:48:54.092406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592678120649136542:2305][/dc-1] Sync cookie mismatch: sender# [1:7592678120649136557:2305], cookie# 1, current cookie# 0 2026-01-07T17:48:54.092457Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592678120649136309:2149], 
notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:48:54.098929Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592678120649136309:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592678120649136542:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:48:54.099061Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592678120649136309:2149], cacheItem# { Subscriber: { Subscriber: [1:7592678120649136542:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:48:54.106995Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592678124944104075:2467], recipient# [1:7592678124944104074:2466], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:48:54.107078Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592678124944104074:2466] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:54.140448Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592678124944104074:2466] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:48:54.143865Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592678124944104074:2466] Handle TEvDescribeSchemeResult Forward to# [1:7592678124944104073:2465] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:48:54.145376Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592678120649136302:2146] Handle TEvProposeTransaction 2026-01-07T17:48:54.145410Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592678120649136302:2146] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:48:54.145507Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592678120649136302:2146] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7592678124944104081:2472] 2026-01-07T17:48:54.243258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592678124944104081:2472] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:48:54.243318Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592678124944104081:2472] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatab ... 
data/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:14.821881Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7592678207528006451:2247], recipient# [14:7592678207528006450:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:49:14.822000Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:14.941556Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7592678203233039072:2236] 2026-01-07T17:49:14.941645Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.941683Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7592678203233039073:2236] 2026-01-07T17:49:14.941707Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.941728Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7592678203233039074:2236] 2026-01-07T17:49:14.941753Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039069:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 
elements } 2026-01-07T17:49:14.942440Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7592678203233039075:2237] 2026-01-07T17:49:14.942478Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7592678203233039084:2238] 2026-01-07T17:49:14.942517Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.942524Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.942546Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7592678203233039076:2237] 2026-01-07T17:49:14.942552Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7592678203233039085:2238] 2026-01-07T17:49:14.942578Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.942579Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.942600Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7592678203233039086:2238] 2026-01-07T17:49:14.942606Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7592678203233039077:2237] 2026-01-07T17:49:14.942626Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039071:2238][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:14.942636Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7592678203233039070:2237][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7592678186053169612:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:15.078674Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7592678186053169612:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:49:15.078772Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7592678186053169612:2109], cacheItem# { Subscriber: { Subscriber: [14:7592678203233039070:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:15.078812Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7592678186053169612:2109], cacheItem# { Subscriber: { Subscriber: [14:7592678203233039071:2238] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:15.078890Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7592678211822973757:2248], recipient# 
[14:7592678203233039065:2312], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-07T17:49:15.079175Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7592678203233039065:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriber2DCTest::SimpleSubscriber >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriber2DCTest::DropByDisconnect >> TBoardSubscriberTest::NotAvailableByShutdown >> TBoardSubscriber2DCTest::ReconnectReplica >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: 2026-01-07T17:48:12.605359Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:48:12.671817Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:12.675856Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:12.676151Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:12.676203Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:12.676267Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:12.676317Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:12.676361Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:12.676431Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:48:12.693152Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:183:2195], now have 1 active actors on pipe 2026-01-07T17:48:12.693265Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:12.715204Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 
LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:12.717962Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:12.718117Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:12.718921Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:12.719050Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:12.719409Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:12.719786Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:12.722231Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:12.722294Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-07T17:48:12.722340Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:12.722387Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:12.722448Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:12.723801Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:12.725026Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:12.725076Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.725114Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:12.725166Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:12.725232Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:48:12.725264Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.725300Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:12.725365Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:12.725424Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:12.725457Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:12.725483Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-07T17:48:12.725503Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-07T17:48:12.725525Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:48:12.725557Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:48:12.725595Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.725843Z node 1 :PERSQUEUE DEBUG: 
partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:12.725886Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:12.725956Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:12.726154Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:12.726322Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:12.728375Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:12.728480Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:12.728531Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:12.728562Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.728594Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:12.728628Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:12.728681Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:12.728727Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:12.729078Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe 2026-01-07T17:48:12.742391Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2205], now have 1 active actors on pipe 2026-01-07T17:48:12.742523Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:48:12.742570Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1-- ... 
: 72057594037927937] server connected, pipe [28:1001:2990], now have 1 active actors on pipe 2026-01-07T17:49:14.974927Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:14.989899Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.034682Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1004:2993], now have 1 active actors on pipe 2026-01-07T17:49:15.036437Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.048119Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.074067Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1007:2996], now have 1 active actors on pipe 2026-01-07T17:49:15.075852Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.091131Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.113373Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1010:2999], now have 1 active actors on pipe 2026-01-07T17:49:15.115324Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.130365Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.150672Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1013:3002], now have 1 active actors on pipe 2026-01-07T17:49:15.152350Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.167856Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.241041Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1016:3005], now have 1 active actors on pipe 2026-01-07T17:49:15.242745Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.255582Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.292977Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1019:3008], now have 1 active actors on pipe 2026-01-07T17:49:15.295283Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.311637Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.336848Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1022:3011], now have 1 active actors on pipe 2026-01-07T17:49:15.338591Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.352512Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.372562Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1025:3014], now have 1 active actors on pipe 2026-01-07T17:49:15.374345Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.385595Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.411803Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1028:3017], now have 1 active actors on pipe 2026-01-07T17:49:15.413460Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.424515Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.466170Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1031:3020], now have 1 active actors on pipe 2026-01-07T17:49:15.467732Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.479244Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.501134Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1034:3023], now have 1 active actors on pipe 2026-01-07T17:49:15.502747Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.515612Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.536380Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1037:3026], now have 1 active actors on pipe 2026-01-07T17:49:15.538011Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.551060Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-07T17:49:15.569627Z node 28 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [28:1040:3029] connected; active server actors: 1 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TBoardSubscriberTest::ReconnectReplica >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSentinelTests::PDiskPileGuardFullPile [GOOD] >> TSentinelTests::PDiskPileGuardConfig |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] |94.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-07T17:48:53.664001Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678119637430072:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:53.664665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:53.852170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:48:53.896971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:53.897096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:53.901549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:53.959810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:54.026501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:48:54.119663Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7592678119637430314:2143] Handle TEvNavigate describe path dc-1 2026-01-07T17:48:54.119703Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7592678123932398079:2457] HANDLE EvNavigateScheme dc-1 2026-01-07T17:48:54.119813Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678119637430321:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:48:54.119904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7592678119637430538:2289][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7592678119637430321:2146], cookie# 1 2026-01-07T17:48:54.121535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678119637430547:2289][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430544:2289], cookie# 1 2026-01-07T17:48:54.121575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678119637430548:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430545:2289], cookie# 1 2026-01-07T17:48:54.121628Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678119637429963:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430547:2289], cookie# 1 2026-01-07T17:48:54.121631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7592678119637430549:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430546:2289], cookie# 1 2026-01-07T17:48:54.121663Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678119637429966:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430548:2289], cookie# 1 2026-01-07T17:48:54.121675Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7592678119637429969:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7592678119637430549:2289], cookie# 1 2026-01-07T17:48:54.121712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678119637430547:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637429963:2051], cookie# 1 2026-01-07T17:48:54.121728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678119637430548:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637429966:2054], cookie# 1 2026-01-07T17:48:54.121777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7592678119637430549:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637429969:2057], cookie# 1 2026-01-07T17:48:54.121812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678119637430538:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637430544:2289], cookie# 1 2026-01-07T17:48:54.121841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7592678119637430538:2289][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:48:54.121863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678119637430538:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637430545:2289], cookie# 1 2026-01-07T17:48:54.121884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7592678119637430538:2289][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:48:54.121911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7592678119637430538:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7592678119637430546:2289], cookie# 1 2026-01-07T17:48:54.121924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7592678119637430538:2289][/dc-1] Sync cookie mismatch: sender# 
[1:7592678119637430546:2289], cookie# 1, current cookie# 0 2026-01-07T17:48:54.122000Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7592678119637430321:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-07T17:48:54.126770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7592678119637430321:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7592678119637430538:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:48:54.126860Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7592678119637430321:2146], cacheItem# { Subscriber: { Subscriber: [1:7592678119637430538:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-07T17:48:54.129024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7592678123932398080:2458], recipient# [1:7592678123932398079:2457], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T17:48:54.129084Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7592678123932398079:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:54.163245Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7592678123932398079:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-07T17:48:54.165849Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7592678123932398079:2457] Handle TEvDescribeSchemeResult Forward to# [1:7592678123932398078:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-07T17:48:54.167150Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7592678119637430314:2143] Handle TEvProposeTransaction 2026-01-07T17:48:54.167190Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7592678119637430314:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:48:54.167243Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7592678119637430314:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7592678123932398085:2462] 2026-01-07T17:48:54.244392Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7592678123932398085:2462] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-07T17:48:54.244447Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7592678123932398085:2462] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatab ... 
_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7592678218649862840:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7592678205764959731:2053] 2026-01-07T17:49:16.382293Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][6:7592678218649862821:2771][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [6:7592678205764960057:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:16.382306Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7592678218649862839:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7592678205764959728:2050] 2026-01-07T17:49:16.382312Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7592678218649862821:2771][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [6:7592678218649862826:2771] 2026-01-07T17:49:16.382321Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7592678218649862841:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7592678205764959734:2056] 2026-01-07T17:49:16.382331Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7592678218649862821:2771][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [6:7592678205764960057:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:16.382345Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7592678218649862823:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7592678218649862837:2773] 2026-01-07T17:49:16.382364Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7592678218649862823:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7592678218649862836:2773] 2026-01-07T17:49:16.382379Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-07T17:49:16.382380Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][6:7592678218649862823:2773][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [6:7592678205764960057:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:16.382393Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7592678218649862823:2773][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests 
Version: 0 }: sender# [6:7592678218649862838:2773] 2026-01-07T17:49:16.382407Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7592678218649862823:2773][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7592678205764960057:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:49:16.382425Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959728:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862833:2772] 2026-01-07T17:49:16.382425Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7592678218649862822:2772] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:49:16.382436Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959728:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862830:2771] 2026-01-07T17:49:16.382448Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959728:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862839:2773] 2026-01-07T17:49:16.382458Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959731:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862834:2772] 2026-01-07T17:49:16.382472Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959731:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862831:2771] 2026-01-07T17:49:16.382483Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959731:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862840:2773] 2026-01-07T17:49:16.382486Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592678205764960057:2126], cacheItem# { Subscriber: { Subscriber: [6:7592678218649862822:2772] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:16.382493Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959734:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862835:2772] 2026-01-07T17:49:16.382503Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-07T17:49:16.382504Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: 
[6:7592678205764959734:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862832:2771] 2026-01-07T17:49:16.382512Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7592678205764959734:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7592678218649862841:2773] 2026-01-07T17:49:16.382526Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7592678218649862821:2771] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:49:16.382555Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592678205764960057:2126], cacheItem# { Subscriber: { Subscriber: [6:7592678218649862821:2771] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:16.382589Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-07T17:49:16.382632Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7592678205764960057:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7592678218649862823:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-07T17:49:16.382662Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592678218649862842:2774], recipient# [6:7592678218649862816:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-07T17:49:16.382664Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7592678205764960057:2126], cacheItem# { Subscriber: { Subscriber: [6:7592678218649862823:2773] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-07T17:49:16.382716Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7592678218649862843:2775], recipient# [6:7592678218649862818:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |94.1%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions |94.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTest::DisableMaintenance [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession >> RemoteTopicReader::ReadTopic |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::DisableMaintenance [GOOD] Test command err: 2026-01-07T17:48:57.512242Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false } } 2026-01-07T17:48:57.512688Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-07T17:48:57.541494Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:48:57.541676Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-07T17:48:57.543683Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: 
"pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-07T17:48:57.544485Z node 2 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 
120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-07T17:48:57.544698Z node 2 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.085000s 2026-01-07T17:48:57.544749Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:48:57.544834Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-07T17:48:57.544884Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-07T17:48:57.544914Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-07T17:48:57.544973Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-07T17:48 ... inel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559316Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559401Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559486Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559555Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559617Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559676Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559735Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: 
"/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-07T17:49:07.559791Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:49:07.560049Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2026-01-07T17:49:07.560128Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-07T17:49:07.560361Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-07T17:49:07.560577Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-07T17:49:07.560620Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 2026-01-07T17:49:07.573370Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-07T17:49:07.598591Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-07T17:49:07.598677Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-07T17:49:07.598727Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:04:00Z 2026-01-07T17:49:07.599347Z node 2 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:49:07.599419Z node 2 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } 2026-01-07T17:49:07.599466Z node 2 :CMS DEBUG: cms.cpp:416: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2026-01-07T17:49:07.599577Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-07T17:49:07.599709Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-07T17:49:07.611573Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-07T17:49:07.611803Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2026-01-07T17:49:07.612213Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-07T17:49:07.624099Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2026-01-07T17:49:07.624337Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { 
DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false 2026-01-07T17:49:12.580058Z node 2 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:49:12.580153Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:49:12.580316Z node 2 :CMS DEBUG: cms.cpp:1178: Running CleanupWalleTasks 2026-01-07T17:49:12.580592Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-07T17:49:12.580675Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-07T17:49:12.580717Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-07T17:49:12.580742Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-07T17:49:12.580765Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-07T17:49:12.580792Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-07T17:49:12.580817Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-07T17:49:12.580841Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:49:12.581135Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.581895Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.581991Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582051Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582113Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582182Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582244Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582294Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-07T17:49:12.582356Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:49:12.582688Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-07T17:49:12.582788Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-07T17:49:12.583161Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-07T17:49:12.583454Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-07T17:49:12.583511Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] Test command err: 2026-01-07T17:49:15.783586Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T17:49:15.783665Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T17:49:15.783722Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T17:49:15.783749Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T17:49:15.783791Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T17:49:15.783874Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T17:49:15.784675Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN 
Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T17:49:15.789845Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 
1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } 
VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-07T17:49:19.783156Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-07T17:49:19.783249Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-07T17:49:19.783287Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:49:19.793774Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 
2026-01-07T17:49:19.793823Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T17:49:19.793903Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-07T17:49:19.793944Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-07T17:49:19.793963Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-07T17:49:19.793981Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-07T17:49:19.793996Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-07T17:49:19.794012Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-07T17:49:19.794032Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-07T17:49:19.794048Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-07T17:49:19.794281Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.794706Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.794882Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo 
{ PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795002Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795090Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795209Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795290Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795367Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-07T17:49:19.795407Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T17:49:19.795664Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:61, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:49:19.795697Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:63, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:49:19.795726Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:60, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:49:19.795752Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:62, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T17:49:19.795773Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-01-07T17:49:19.795937Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-07T17:49:19.795960Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:60 2026-01-07T17:49:19.795979Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:61 2026-01-07T17:49:19.796000Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:62 2026-01-07T17:49:19.796019Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:63 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream |94.2%| [TA] $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... 
results_accumulator.log} >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2026-01-07T17:48:12.831445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:12.831509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:12.831978Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:48:12.843934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:48:12.844296Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-07T17:48:12.844492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:12.852814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:48:12.891687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:12.892379Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:12.894087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T17:48:12.894162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T17:48:12.894232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T17:48:12.894626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:12.894810Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:12.894875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:205:2157] in generation 2 2026-01-07T17:48:12.963948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:12.999151Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T17:48:12.999351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:12.999452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-07T17:48:12.999504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T17:48:12.999544Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T17:48:12.999580Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:12.999738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:12.999809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.000118Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T17:48:13.000232Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T17:48:13.000373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:13.000417Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:13.000462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T17:48:13.000497Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T17:48:13.000544Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T17:48:13.000591Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T17:48:13.000637Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T17:48:13.000728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.000770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.000831Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-07T17:48:13.007331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T17:48:13.007416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:48:13.007528Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T17:48:13.007679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T17:48:13.007753Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T17:48:13.007807Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T17:48:13.007861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T17:48:13.007897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T17:48:13.007932Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T17:48:13.007969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:13.008251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T17:48:13.008296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T17:48:13.008337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T17:48:13.008369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:13.008439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T17:48:13.008470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T17:48:13.008502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T17:48:13.008534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:13.008566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T17:48:13.020734Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T17:48:13.020818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T17:48:13.020852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T17:48:13.020905Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T17:48:13.020984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T17:48:13.021462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.021517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:48:13.021579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-07T17:48:13.021705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T17:48:13.021761Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T17:48:13.021885Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T17:48:13.021928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T17:48:13.021966Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T17:48:13.022002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T17:48:13.029433Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T17:48:13.029524Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T17:48:13.029770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.029813Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:48:13.029877Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T17:48:13.029921Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:48:13.029955Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T17:48:13.029996Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T17:48:13.030040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... [32:347:2315]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-07T17:49:22.381897Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.381925Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-07T17:49:22.382001Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-07T17:49:22.382031Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382060Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-07T17:49:22.382141Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-07T17:49:22.382171Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382199Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-07T17:49:22.382276Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 
txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-07T17:49:22.382305Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382331Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-07T17:49:22.382403Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-07T17:49:22.382432Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382459Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-07T17:49:22.382537Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-07T17:49:22.382566Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382592Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-07T17:49:22.382663Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-07T17:49:22.382693Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382718Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-07T17:49:22.382789Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-07T17:49:22.382817Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382845Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-07T17:49:22.382939Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-07T17:49:22.382968Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.382997Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-07T17:49:22.383070Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet 
step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-07T17:49:22.383099Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383126Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-07T17:49:22.383182Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-07T17:49:22.383211Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383238Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-07T17:49:22.383313Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-07T17:49:22.383342Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383369Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-07T17:49:22.383458Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-07T17:49:22.383488Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383515Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-07T17:49:22.383567Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-07T17:49:22.383594Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383620Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-07T17:49:22.383693Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-07T17:49:22.383721Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383747Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-07T17:49:22.383818Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: 
{TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-07T17:49:22.383846Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.383871Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-07T17:49:22.383944Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-07T17:49:22.383974Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.384001Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-07T17:49:22.384070Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-07T17:49:22.384100Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.384127Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-07T17:49:22.384204Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-07T17:49:22.384232Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.384258Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-07T17:49:22.384387Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-07T17:49:22.384419Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T17:49:22.384459Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 26 31 27 23 29 28 31 31 30 30 7 31 31 31 31 30 26 31 26 26 22 23 31 22 22 23 20 19 - - - - actual 26 31 27 23 29 28 31 31 30 30 7 31 31 31 31 30 26 31 26 26 22 23 31 22 22 23 20 19 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |94.2%| [TA] {RESULT} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete >> StatisticsSaveLoad::ForbidAccess >> StatisticsSaveLoad::Simple |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> KqpScan::ScanPg [GOOD] >> AssignTxId::Basic [GOOD] >> KqpKnn::Int8VectorKnnPushdown [GOOD] >> KqpKnn::Uint8VectorKnnPushdown [GOOD] >> KqpKnn::BitVectorKnnPushdown [GOOD] >> KqpKnn::FloatVectorKnnPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2026-01-07T17:49:21.308126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678240698221767:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:21.308166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:21.506341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:21.592148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:21.592284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:21.644373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:21.703875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:21.708813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:22.032625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:22.032646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:22.032687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:22.032775Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:22.315194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:22.445795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:22.450500Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7592678244993189863:2492] Handshake: worker# [1:7592678244993189861:2490] 2026-01-07T17:49:22.450677Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7592678244993189863:2492] Create read session: session# [1:7592678244993189864:2493] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2026-01-07T17:49:21.051883Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678239806180860:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:21.051940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:21.335753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:21.368556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:21.368686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:21.426697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:21.436806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:21.533060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:21.846416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:21.846490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:21.846495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:21.846553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:22.057457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:22.250426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:23.489573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678248396116293:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.489678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.489898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678248396116302:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.489923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:24.076632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-01-07T17:49:24.092982Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2026-01-07T17:49:24.093048Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2026-01-07T17:49:24.095878Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2026-01-07T17:49:24.095957Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:245: [controller 72075186224037888][TxInit] Execute 2026-01-07T17:49:24.096259Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:250: [controller 72075186224037888][TxInit] Complete 2026-01-07T17:49:24.096279Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:128: [controller 72075186224037888] SwitchToWork 2026-01-07T17:49:24.103403Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:171: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:12366" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-07T17:49:24.103693Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:12366" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-07T17:49:24.103803Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T17:49:24.104333Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2026-01-07T17:49:24.104411Z node 1 :REPLICATION_CONTROLLER INFO: tx_create_replication.cpp:68: [controller 72075186224037888][TxCreateReplication] Discover tenant nodes: tenant# /Root 2026-01-07T17:49:24.106586Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:335: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2026-01-07T17:49:24.106641Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:359: [controller 72075186224037888] Create session: nodeId# 1 2026-01-07T17:49:24.131892Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-07T17:49:24.131933Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2026-01-07T17:49:24.132056Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-07T17:49:24.132130Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-07T17:49:24.132195Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2026-01-07T17:49:24.132522Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2026-01-07T17:49:24.159486Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2026-01-07T17:49:24.159570Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2026-01-07T17:49:24.159615Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2026-01-07T17:49:24.159709Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:49:24.159736Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2026-01-07T17:49:24.160079Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-07T17:49:24.160327Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2026-01-07T17:49:24.160353Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-07T17:49:24.160386Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-07T17:49:24.160560Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2026-01-07T17:49:24.160576Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-07T17:49:24.160614Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-07T17:49:24.160795Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2026-01-07T17:49:24.160836Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-07T17:49:24.161126Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-07T17:49:24.161323Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2026-01-07T17:49:24.161348Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2026-01-07T17:49:24.161383Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: 
pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-07T17:49:24.161557Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2026-01-07T17:49:24.161585Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2026-01-07T17:49:24.161793Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2026-01-07T17:49:24.161870Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-07T17:49:24.161891Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2026-01-07T17:49:24.161951Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2026-01-07T17:49:24.162004Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2026-01-07T17:49:24.162045Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2026-01-07T17:49:24.162265Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 9664, MsgBus: 4554 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:14.705310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:14.827823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:14.863201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:14.863691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:14.863751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:15.261896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:15.262028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:15.348311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808152291645 != 1767808152291649 2026-01-07T17:49:15.455362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:15.522076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:15.748327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:16.491337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:16.491476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:16.491525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:16.492087Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:16.503872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:16.786571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:16.845086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.151639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.514404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.757360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.586639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.586797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.587661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.587862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.706408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.910410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.167892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.394064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.691302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.935815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.193142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.472599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.824559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.824702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.825158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.825233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.825380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.831428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:21.009201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:21.068246Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.395169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 30820, MsgBus: 19195 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:15.821525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:15.899378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:15.915569Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:15.916002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:15.916061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:16.174697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:16.174807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:16.239712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808153819332 != 1767808153819336 2026-01-07T17:49:16.249020Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:16.292706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:16.414613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:16.701193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:16.701275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:16.701317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:16.701828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:16.713460Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:16.976876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:17.071904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.305722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.601676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.845789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.587167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.587334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.588247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.588354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.705544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.908731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.134768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.395207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.623624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.864395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.103816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.361105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.697382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.697505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.697929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.698019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.698110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.705849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:20.866193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:20.936003Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.395020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:48:56.053922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:48:56.197208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:56.223023Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:48:56.229585Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:373:2293], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:56.230252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:56.230524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:56.232717Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:56.234376Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:692:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:48:56.234546Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:48:56.234735Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:48:56.793306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:56.911142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:56.911258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:56.911829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:56.911893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:56.956860Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:48:56.957728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:56.958821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:57.059318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:57.138799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:48:57.699607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:57.764297Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:57.766134Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:58.093526Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:225:2183] Handle TEvProposeTransaction 2026-01-07T17:48:58.093612Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:225:2183] TxId# 281474976710657 ProcessProposeTransaction 2026-01-07T17:48:58.094511Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:225:2183] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1573:3027] 2026-01-07T17:48:58.211823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1573:3027] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:48:58.211933Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1573:3027] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:48:58.213765Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1573:3027] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:48:58.213871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1573:3027] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:48:58.214356Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1573:3027] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:48:58.214545Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1573:3027] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:48:58.214730Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1573:3027] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-07T17:48:58.222970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:58.224063Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1573:3027] txid# 281474976710657 HANDLE EvClientConnected 2026-01-07T17:48:58.226807Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1573:3027] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-07T17:48:58.226887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1573:3027] txid# 281474976710657 SEND to# [1:1498:2982] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-07T17:48:58.298568Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1624:2399] 2026-01-07T17:48:58.298883Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:48:58.354397Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:48:58.354702Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:48:58.357351Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:48:58.357455Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:48:58.357538Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:48:58.359499Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:48:58.360770Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:48:58.360857Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1648:2399] in generation 1 2026-01-07T17:48:58.376360Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:48:58.418590Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:48:58.419930Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:48:58.420076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1651:2416] 2026-01-07T17:48:58.420116Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:48:58.420161Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:48:58.420203Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:48:58.421639Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:48:58.421745Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:48:58.421818Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:48:58.421854Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:48:58.421890Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:48:58.421931Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:48:58.422058Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1607:3057], serverId# [2:1621:2397], sessionId# [0:0:0] 2026-01-07T17:48:58.422694Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:48:58.423135Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:48:58.425479Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T17:48:58.429603Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057 ... d=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-07T17:49:13.975215Z node 4 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-07T17:49:13.975533Z node 4 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.009000Z, after 1.052500s 2026-01-07T17:49:13.976041Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:1905:3210] TxId: 281474976715662. Ctx: { TraceId: 01kecs7d1v6caaez1ev3rbz7yw, Database: , SessionId: ydb://session/3?node_id=3&id=MTFkMzQwYWQtZDNlYzk3NzktYWZkYTgwZDYtODU2MzUyMGQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:1914:2440] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 247738 DurationUs: 9000 Tasks { TaskId: 1 CpuTimeUs: 245111 FinishTimeMs: 1767808153972 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 100 ReadBytes: 800 } ResultRows: 1 ResultBytes: 7 ComputeCpuTimeUs: 189 BuildCpuTimeUs: 244922 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-z2c6jslqle" NodeId: 4 StartTimeMs: 1767808153963 CreateTimeMs: 1767808153690 UpdateTimeMs: 1767808153972 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:49:13.976109Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecs7d1v6caaez1ev3rbz7yw, Database: , SessionId: ydb://session/3?node_id=3&id=MTFkMzQwYWQtZDNlYzk3NzktYWZkYTgwZDYtODU2MzUyMGQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:1914:2440] 2026-01-07T17:49:13.976187Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [3:1905:3210] TxId: 281474976715662. Ctx: { TraceId: 01kecs7d1v6caaez1ev3rbz7yw, Database: , SessionId: ydb://session/3?node_id=3&id=MTFkMzQwYWQtZDNlYzk3NzktYWZkYTgwZDYtODU2MzUyMGQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:49:13.976257Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1509} ActorId: [3:1905:3210] TxId: 281474976715662. Ctx: { TraceId: 01kecs7d1v6caaez1ev3rbz7yw, Database: , SessionId: ydb://session/3?node_id=3&id=MTFkMzQwYWQtZDNlYzk3NzktYWZkYTgwZDYtODU2MzUyMGQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-07T17:49:13.976327Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:1905:3210] TxId: 281474976715662. Ctx: { TraceId: 01kecs7d1v6caaez1ev3rbz7yw, Database: , SessionId: ydb://session/3?node_id=3&id=MTFkMzQwYWQtZDNlYzk3NzktYWZkYTgwZDYtODU2MzUyMGQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.638842s ReadRows: 100 ReadBytes: 800 RequestUnits# 425 ForceFlag# true trace_id# 2026-01-07T17:49:13.977333Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-07T17:49:13.977436Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:225:2183] Handle TEvProposeTransaction 2026-01-07T17:49:13.977472Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:225:2183] TxId# 0 ProcessProposeTransaction 2026-01-07T17:49:13.977581Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:225:2183] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1936:3250] SnapshotReq marker# P0 2026-01-07T17:49:13.978572Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1939:3250] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-07T17:49:13.978894Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1939:3250] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-07T17:49:13.978998Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1936:3250] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:19.382171Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:19.382843Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:19.392097Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:19.394226Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:19.394408Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:19.394761Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:19.394877Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:19.396144Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:691:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:19.396294Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:19.396363Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:19.619440Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:19.700466Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:19.700575Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:19.701116Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:19.701186Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:19.768764Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-07T17:49:19.769186Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:19.769518Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:19.805941Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:19.860001Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:20.477299Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:20.544774Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:20.544900Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:20.889173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:21.404038Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1725:3108], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.404137Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1736:3113], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.404207Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.405124Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1741:3118], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.405214Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.409629Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:21.861301Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1739:3116], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:21.950847Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:1833:3182] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:23.587719Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 5276, MsgBus: 6119 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:16.018281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:16.095979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:16.107450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:16.107798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:16.107857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:16.297558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:16.297657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:16.355221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808154016150 != 1767808154016154 2026-01-07T17:49:16.364572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:16.406421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:16.527299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:16.814578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:16.814649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:16.814681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:16.815096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:16.827601Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:17.062275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:17.101409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.308648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.617019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.854770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.622158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.622321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.623014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.623115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.705283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.910866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.165261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.390064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.678283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.915931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.183793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.475317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.838723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.838827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.839099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.839156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.839202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.843358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:20.989921Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:21.050970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.394804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 6654, MsgBus: 1936 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:14.678039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:14.828038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:14.863197Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:14.863877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:14.863951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:15.262115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:15.262247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:15.346694Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808152292582 != 1767808152292586 2026-01-07T17:49:15.455355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:15.522535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:15.748524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:16.491546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:16.491643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:16.491692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:16.492245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:16.503892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:16.781731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:16.896749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.142611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.452709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.707068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.585406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.585531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.586235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.586309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.705954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.910233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.147215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.416153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.648132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.897295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.145685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.425578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.763781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.763869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.764133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.764215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.764276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.768483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:20.950218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:21.019681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.435237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.2%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkMvcc::LostUpdate+IsOlap >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables >> KqpSinkTx::ExplicitTcl >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpLocks::InvalidateOnCommit >> KqpTx::RollbackManyTx >> KqpSinkMvcc::WriteSkewInsert+IsOlap >> KqpSinkLocks::TInvalidate >> KqpSnapshotIsolation::TSimpleOltp >> KqpSinkTx::SnapshotRO >> KqpSinkMvcc::WriteSkewUpsert+IsOlap >> KqpSinkTx::OlapSnapshotRO >> KqpLocks::Invalidate >> KqpSinkMvcc::TxReadsCommitted-IsOlap >> KqpSinkMvcc::SnapshotExpiration >> KqpTx::RollbackTx >> KqpSinkLocks::OlapVisibleUncommittedRows >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpSinkLocks::VisibleUncommittedRows |94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26237, MsgBus: 12825 2026-01-07T17:48:42.760614Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678073853971962:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.760722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:42.975572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:42.997540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.997695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.048201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.071282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.074358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678073853971925:2081] 1767808122756325 != 1767808122756328 2026-01-07T17:48:43.162563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.265640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:43.265778Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.767815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.837998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.841565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.855704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.856664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.859092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123907, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.860159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-07T17:48:43.860206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-07T17:48:43.860441Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678073853972453:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-07T17:48:43.860597Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073853971896:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.860705Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073853971899:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.860832Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678073853972372:2202][/Root] Path was updated to new version: owner# [1:7592678073853972214:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.860849Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678078148939864:2289][/Root] Path was updated to new version: owner# [1:7592678078148939857:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.861075Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678078148939862:2288][/Root] Path was updated to new version: owner# [1:7592678078148939856:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 
elements } 2026-01-07T17:48:43.861135Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678073853972453:2248] Ack update: ack to# [1:7592678073853972273:2144], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.861370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-07T17:48:43.861656Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073853971893:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:45.109556Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.110548Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002dd3/r3tmp/spilling-tmp-runner/node_1_c32d62ef-a620a755-b25fbda6-8dbd6908, actor: [1:7592678086738874654:2305] 2026-01-07T17:48:45.110714Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002dd3/r3tmp/spilling-tmp-runner 2026-01-07T17:48:45.111372Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086738874661:2492][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678073853972214:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:48:45.112616216 394783 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.112762530 394783 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0107 17:48:45.114349178 394783 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.114437388 394783 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.115306Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gyy75bgn19vmyhfa0zc", Request has 18444976265584.436341s seconds to be completed 2026-01-07T17:48:45.116426Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.116695Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086738874685:2494][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678073853972214:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.116704Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086738874686:2495][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592678073853972214:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.118736Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gyy75bgn19vmyhfa0zc", Created new session, sessionId: ydb://session/3?node_id=1&id=MmE0ZjBjMTgtZjMwNTM5OTItYzhjNzY4Mi0xN2ViZGQ2OA==, workerId: [1:7592678086738874700:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:48:45.118875Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gyy75bgn19vmyhfa0zc 2026-01-07T17:48:45.118921Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.118939Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.118962Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0107 17:48:45.125152375 394778 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.125301049 394778 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.126369Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.141931Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecs6j6n3xzgqfw32m79tgsp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmE0ZjBjMTgtZjMwNTM5OTItYzhjNzY4Mi0xN2ViZGQ2OA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592678086738874700:2327] 2026-01-07T17:48:45.142008Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7592678086738874702:2497] 2026-01-07T17:48:45.142792Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086738874704:2498][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7592678073853972214:2120], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:49:24.855647Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678253790124786:2337] TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjpfabj5h8cw5rfh9jp, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE4ZGMyZGYtODk1ZTkwY2QtOGZiNWZmMTgtNDI2M2VlODQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: 
false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:49:24.860555Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjpfabj5h8cw5rfh9jp, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE4ZGMyZGYtODk1ZTkwY2QtOGZiNWZmMTgtNDI2M2VlODQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678253790124790:2350] 2026-01-07T17:49:24.861608Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjpfabj5h8cw5rfh9jp, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzE4ZGMyZGYtODk1ZTkwY2QtOGZiNWZmMTgtNDI2M2VlODQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678253790124791:2351] 2026-01-07T17:49:24.862327Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7rjpfabj5h8cw5rfh9jp", Forwarded response to sender actor, requestId: 4, sender: [9:7592678253790124687:2336], selfId: [9:7592678236610254810:2266], source: [9:7592678253790124688:2337] 2026-01-07T17:49:24.862816Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MzE4ZGMyZGYtODk1ZTkwY2QtOGZiNWZmMTgtNDI2M2VlODQ=, workerId: [9:7592678253790124688:2337], local sessions count: 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3385, MsgBus: 21693 2026-01-07T17:48:42.760528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678070356215694:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.760584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.008011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:43.008157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.019231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:43.029124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.040900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded 2026-01-07T17:48:43.053080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678070356215657:2081] 1767808122756354 != 1767808122756357 2026-01-07T17:48:43.265634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:43.265780Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.291205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.767689Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.874252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.874680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.876643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.877551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.879747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123928, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.880967Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678070356216189:2249] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-07T17:48:43.881042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-07T17:48:43.881088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-07T17:48:43.881197Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678070356215625:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.881241Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678070356215628:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.881308Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: 
[1:7592678070356215631:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.881468Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678074651183591:2285][/Root] Path was updated to new version: owner# [1:7592678074651183587:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.881503Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678070356216189:2249] Ack update: ack to# [1:7592678070356216013:2147], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.881554Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678070356216096:2200][/Root] Path was updated to new version: owner# [1:7592678070356215946:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.881671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-07T17:48:43.881687Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678074651183592:2286][/Root] Path was updated to new version: owner# [1:7592678074651183588:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.288728Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.290292Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002dda/r3tmp/spilling-tmp-runner/node_1_7f75c941-e61bce60-306a645f-b6d6b9b7, actor: [1:7592678083241118386:2305] 2026-01-07T17:48:45.290425Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002dda/r3tmp/spilling-tmp-runner 2026-01-07T17:48:45.291233Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gzn9wcg1sapmy96584a", Request has 18444976265584.260419s seconds to be completed 2026-01-07T17:48:45.294116Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gzn9wcg1sapmy96584a", Created new session, sessionId: ydb://session/3?node_id=1&id=OTE2MzQzMGYtNTE0OWRhYjYtMmNlMWJjNjctYTAyNzI2NTI=, workerId: [1:7592678083241118408:2326], database: /Root, longSession: 1, local sessions count: 1 
2026-01-07T17:48:45.294270Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gzn9wcg1sapmy96584a 2026-01-07T17:48:45.294313Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.294335Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.294354Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.295432Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678083241118410:2493][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592678070356215946:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.295433Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678083241118409:2492][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678070356215946:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:48:45.295681570 394769 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.295831759 394769 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.295823Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678083241118423:2494][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678070356215946:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:48:45.297479645 394769 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.297569236 394769 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.298404Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.305751604 394769 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.305860266 394769 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.306712Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.324186Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecs6jcb0371zf9hepnnw33f, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTE2MzQzMGYtNTE0OWRhYjYtMmNlMWJjNjctYTAyNzI2NTI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592678083241118408:2326] 2026-01-07T17:48:45.324248Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7592678083241118434:2497] 2026-01-07T17:48:45.325080Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678083241118436:2498][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7592678070356215946:2120], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-07T17:49:24.756437Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678250273509140:2337] TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjxey6wwjpcq5mazmbz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGU5ZjM3ZDktYmFjNjNhNmUtNzA0NTFlYTAtYWFlNzMwNGQ=, PoolId: default, IsStreamingQuery: 0}. 
Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:49:24.761741Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjxey6wwjpcq5mazmbz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGU5ZjM3ZDktYmFjNjNhNmUtNzA0NTFlYTAtYWFlNzMwNGQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678250273509144:2350] 2026-01-07T17:49:24.762771Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7rjxey6wwjpcq5mazmbz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGU5ZjM3ZDktYmFjNjNhNmUtNzA0NTFlYTAtYWFlNzMwNGQ=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7592678250273509145:2351] 2026-01-07T17:49:24.763521Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7rjxey6wwjpcq5mazmbz", Forwarded response to sender actor, requestId: 4, sender: [9:7592678250273509041:2336], selfId: [9:7592678233093639135:2250], source: [9:7592678250273509042:2337] 2026-01-07T17:49:24.764137Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NGU5ZjM3ZDktYmFjNjNhNmUtNzA0NTFlYTAtYWFlNzMwNGQ=, workerId: [9:7592678250273509042:2337], local sessions count: 0 E0107 17:49:24.946486159 413455 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:24.946688561 413455 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 23880, MsgBus: 30485 2026-01-07T17:48:42.762208Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678072414608718:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.762414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:42.948159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:42.989590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.989706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.991521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.051845Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678072414608681:2081] 1767808122756706 != 1767808122756709 2026-01-07T17:48:43.066967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.222155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.265447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:43.265542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.771839Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.842336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.842748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.844761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.845693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.847983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123893, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.849161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:48:43.849203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-07T17:48:43.849404Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678076709576518:2258] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:48:43.849821Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678072414608649:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.849988Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678072414608655:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.850003Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678072414608652:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.850085Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678076709576544:2286][/Root] Path was updated to new version: owner# [1:7592678076709576537:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.850226Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678072414609103:2193][/Root] Path was updated to new version: owner# [1:7592678072414608961:2111], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: 
[OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.850412Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678076709576518:2258] Ack update: ack to# [1:7592678072414609050:2153], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.850446Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678076709576543:2285][/Root] Path was updated to new version: owner# [1:7592678076709576536:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.850598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-07T17:48:45.085173Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.086360Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002cd6/r3tmp/spilling-tmp-runner/node_1_72a7d0a7-f37ae3e1-ef48e5fd-1df158a9, actor: [1:7592678085299511407:2305] 2026-01-07T17:48:45.086549Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002cd6/r3tmp/spilling-tmp-runner 2026-01-07T17:48:45.088266Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gyvdqg2e1trqhzdgmj7", Request has 18444976265584.463399s seconds to be completed 2026-01-07T17:48:45.091124Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678085299511430:2490][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678072414608961:2111], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.091236Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678085299511428:2489][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678072414608961:2111], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.091401Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678085299511431:2491][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592678072414608961:2111], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:48:45.091422203 394768 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.091585134 394768 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.092511Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gyvdqg2e1trqhzdgmj7", Created new session, sessionId: 
ydb://session/3?node_id=1&id=N2RmNDgzNWMtODAyYmIwMGQtYjY0NDhjZmItNWU1ZGRmYWE=, workerId: [1:7592678085299511452:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:48:45.092782Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gyvdqg2e1trqhzdgmj7 2026-01-07T17:48:45.092847Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.092893Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.092980Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0107 17:48:45.093445355 394768 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.093549149 394768 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.094638Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.105133575 394768 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.105315204 394768 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.106458Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.120233577 394768 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.120370371 394768 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.121130Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.128860Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecs6j638tg9ew57v1m4xfqf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2RmNDgzNWMtODAyYmIwMGQtYjY0NDhjZmItNWU1ZGRmYWE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592678 ... } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0107 17:49:25.217756732 413740 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:25.217897550 413740 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:25.218980Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0107 17:49:25.426627768 413740 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:25.426780652 413740 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:25.427884Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:49:25.447349Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678254364560666:2340] TxId: 281474976710663. Ctx: { TraceId: 01kecs7s4saefts50b9c4wfays, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDRiMWFhNGQtZjNjZmUxYWItMzgwMjVlMDMtOWU5NGFkYTE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:49:25.451683Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01kecs7s4saefts50b9c4wfays, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDRiMWFhNGQtZjNjZmUxYWItMzgwMjVlMDMtOWU5NGFkYTE=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7592678254364560670:2354] 2026-01-07T17:49:25.452603Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01kecs7s4saefts50b9c4wfays, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDRiMWFhNGQtZjNjZmUxYWItMzgwMjVlMDMtOWU5NGFkYTE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678254364560671:2355] 2026-01-07T17:49:25.453278Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7s4saefts50b9c4wfays", Forwarded response to sender actor, requestId: 4, sender: [9:7592678254364560563:2339], selfId: [9:7592678237184690645:2266], source: [9:7592678254364560564:2340] 2026-01-07T17:49:25.453883Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NDRiMWFhNGQtZjNjZmUxYWItMzgwMjVlMDMtOWU5NGFkYTE=, workerId: [9:7592678254364560564:2340], local sessions count: 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> KqpTx::ExplicitTcl >> KqpTx::TooManyTx >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite >> KqpSinkMvcc::TxReadsCommitted+IsOlap >> KqpLocksTricky::TestNoWrite >> RemoteTopicReader::ReadTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 16384, MsgBus: 22683 2026-01-07T17:48:42.760575Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678073649539506:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.760646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:42.992228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:42.993391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:42.993502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:42.997469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.071890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.072623Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678073649539470:2081] 1767808122756327 != 1767808122756330 2026-01-07T17:48:43.081692Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:48:43.152958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.265223Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:43.265388Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.768227Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.848686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.849060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.851629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.852545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.854595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123900, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.855742Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678077944507311:2259] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:48:43.855822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:48:43.855871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-07T17:48:43.855912Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073649539438:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.856046Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073649539441:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.856071Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073649539444:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.856235Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678073649539896:2192][/Root] Path was updated to new version: owner# [1:7592678073649539759:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: 
there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.856234Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678077944507317:2289][/Root] Path was updated to new version: owner# [1:7592678077944507310:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.856395Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678077944507311:2259] Ack update: ack to# [1:7592678073649539831:2152], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.856554Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678077944507313:2285][/Root] Path was updated to new version: owner# [1:7592678077944507309:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.856560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-07T17:48:45.143101Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.144255Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002dc3/r3tmp/spilling-tmp-runner/node_1_c3b6ad4a-7c481394-a8a30d70-a027e700, actor: [1:7592678086534442200:2305] 2026-01-07T17:48:45.144391Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002dc3/r3tmp/spilling-tmp-runner 2026-01-07T17:48:45.145244Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086534442207:2492][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678073649539759:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.145865Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gyw6s7nj32e5ecdcqsd", Request has 18444976265584.405785s seconds to be completed E0107 17:48:45.148890929 394788 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-07T17:48:45.148890Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gyw6s7nj32e5ecdcqsd", Created new session, sessionId: ydb://session/3?node_id=1&id=N2FlZTRiLTkxYmUwODY0LTkyNDFiMTcwLTRlY2ZkMzkx, workerId: [1:7592678086534442244:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:48:45.148898Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: 
[main][1:7592678086534442231:2495][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592678073649539759:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.148909Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086534442230:2494][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678073649539759:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0107 17:48:45.149059102 394788 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.149078Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gyw6s7nj32e5ecdcqsd 2026-01-07T17:48:45.149137Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.149174Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.149210Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0107 17:48:45.150678541 394788 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.150764372 394788 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.151669Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.160713281 394788 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.160830493 394788 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.161709Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.164914Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kecs6j7c1nrb7qb5hgcfpyng, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2FlZTRiLTkxYmUwODY0LTkyNDFiMTcwLTRlY2ZkMzkx, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7592678086534442244:2326] 2026-01-07T17:48:45.164968Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7592678086534442248:2497] 2026-01-07T17:48:45.165833Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086534442250:2498][/Root/.metadata/workload_manager/po ... "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 E0107 17:49:25.704456171 414027 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:25.704667218 414027 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:25.706170Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:49:25.757870Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678258554323515:2337] TxId: 281474976710662. Ctx: { TraceId: 01kecs7sdcc67kh7z7zf8mt2nm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTI1OTNjNDYtYWQwYzYwZWQtY2M0OWJmMzgtZmVmYmU4Yjk=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2026-01-07T17:49:25.763591Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7sdcc67kh7z7zf8mt2nm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTI1OTNjNDYtYWQwYzYwZWQtY2M0OWJmMzgtZmVmYmU4Yjk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678258554323519:2351] 2026-01-07T17:49:25.764711Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7sdcc67kh7z7zf8mt2nm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTI1OTNjNDYtYWQwYzYwZWQtY2M0OWJmMzgtZmVmYmU4Yjk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678258554323520:2352] 2026-01-07T17:49:25.765505Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7sdcc67kh7z7zf8mt2nm", Forwarded response to sender actor, requestId: 4, sender: [9:7592678258554323414:2336], selfId: [9:7592678241374453540:2266], source: [9:7592678258554323415:2337] 2026-01-07T17:49:25.766131Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MTI1OTNjNDYtYWQwYzYwZWQtY2M0OWJmMzgtZmVmYmU4Yjk=, workerId: [9:7592678258554323415:2337], local sessions count: 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4178, MsgBus: 7563 2026-01-07T17:48:42.760594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678071937345991:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.760674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.020411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:43.021254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:43.021367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.044477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678071937345954:2081] 1767808122756292 != 1767808122756295 2026-01-07T17:48:43.060591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.067120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.265511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:48:43.265654Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.316513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.767733Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.858811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.859206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.860526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.861348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.863305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123907, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.864482Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678071937346486:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-07T17:48:43.864497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-07T17:48:43.864532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-07T17:48:43.864797Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678071937345922:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.864802Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678071937345925:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.864899Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678071937345928:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.865071Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678076232314006:2289][/Root] Path was updated to new version: owner# [1:7592678076232313994:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], 
Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.865084Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678071937346420:2209][/Root] Path was updated to new version: owner# [1:7592678071937346228:2109], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.865115Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678071937346486:2250] Ack update: ack to# [1:7592678071937346312:2151], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.865311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-07T17:48:43.865331Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678076232313999:2288][/Root] Path was updated to new version: owner# [1:7592678076232313993:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.053057Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:45.053904Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002dd5/r3tmp/spilling-tmp-runner/node_1_fd761317-41b645b0-69f254d5-99668103, actor: [1:7592678084822248681:2305] 2026-01-07T17:48:45.054137Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002dd5/r3tmp/spilling-tmp-runner E0107 17:48:45.056703844 394770 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.056859477 394770 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.058358Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gz70czds007r9pjfnbf", Request has 18444976265584.493303s seconds to be completed E0107 17:48:45.058711758 394770 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.058815408 394770 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.059092Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678084822248704:2490][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678071937346228:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.062129Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678084822248713:2493][/Root/.metadata/workload_manager/running_requests] Set up state: owner# 
[1:7592678071937346228:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.062221Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678084822248712:2492][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678071937346228:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.064259Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.070229731 394770 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.070358169 394770 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.071423Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.084415Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gz70czds007r9pjfnbf", Created new session, sessionId: ydb://session/3?node_id=1&id=YWU5NmMyYzctMmIzYjgwODQtOTFiMzZhOC03NDMyMGNmMQ==, workerId: [1:7592678084822248727:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:48:45.084651Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gz70czds007r9pjfnbf 2026-01-07T17:48:45.084793Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.084826Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.084850Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0107 17:48:45.087469559 394765 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.087553737 394765 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.088281Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.110565292 394765 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.110688036 394765 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.111481Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): ... atusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0107 17:49:26.274719439 414298 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:26.274911191 414298 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:26.277210Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0107 17:49:26.518084230 414300 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:26.518273395 414300 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:26.520219Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ReadSplits. 2026-01-07T17:49:26.525553Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678261022010734:2337] TxId: 281474976715662. Ctx: { TraceId: 01kecs7t6h52rh7jf23kzex8ef, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlMGI2YzQtMmJlYmE3OTUtYjk4MTQ5MjYtNTAxOWExNDM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:49:26.531146Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecs7t6h52rh7jf23kzex8ef, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlMGI2YzQtMmJlYmE3OTUtYjk4MTQ5MjYtNTAxOWExNDM=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7592678261022010738:2351] 2026-01-07T17:49:26.532102Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kecs7t6h52rh7jf23kzex8ef, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlMGI2YzQtMmJlYmE3OTUtYjk4MTQ5MjYtNTAxOWExNDM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678261022010739:2352] 2026-01-07T17:49:26.532749Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7t6h52rh7jf23kzex8ef", Forwarded response to sender actor, requestId: 4, sender: [9:7592678261022010633:2336], selfId: [9:7592678243842140754:2266], source: [9:7592678261022010634:2337] 2026-01-07T17:49:26.533428Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Y2VlMGI2YzQtMmJlYmE3OTUtYjk4MTQ5MjYtNTAxOWExNDM=, workerId: [9:7592678261022010634:2337], local sessions count: 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 5872, MsgBus: 11371 2026-01-07T17:48:42.760552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678073187927092:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:42.760633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:43.040138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:43.040247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:43.065567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:43.094967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:43.097606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678073187927055:2081] 1767808122756303 != 1767808122756306 2026-01-07T17:48:43.110229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:43.265474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:48:43.265506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:48:43.265512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:48:43.265606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:43.374991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:43.767036Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:43.842535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:48:43.842943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:48:43.844902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:43.846170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:48:43.848567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808123893, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:48:43.849862Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7592678073187927575:2242] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-07T17:48:43.849922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-07T17:48:43.849997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-07T17:48:43.850174Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073187927023:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.850358Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073187927026:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.850380Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7592678073187927029:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-07T17:48:43.850481Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678077482895032:2288][/Root] Path was updated to new version: owner# [1:7592678077482895021:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.850516Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7592678073187927575:2242] Ack update: ack to# [1:7592678073187927402:2144], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-07T17:48:43.850767Z node 1 
:SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678073187927486:2198][/Root] Path was updated to new version: owner# [1:7592678073187927342:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:43.850786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-07T17:48:43.850810Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7592678077482895037:2289][/Root] Path was updated to new version: owner# [1:7592678077482895022:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:44.991828Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-07T17:48:44.993767Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678081777862485:2489][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7592678073187927342:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:44.994207Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ec9d/002d91/r3tmp/spilling-tmp-runner/node_1_eb2b037d-b55e8388-20f18fff-74eb2ce2, actor: [1:7592678081777862493:2307] 2026-01-07T17:48:44.994498Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ec9d/002d91/r3tmp/spilling-tmp-runner E0107 17:48:45.011664550 394797 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.011831242 394797 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.027071Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kecs6gysamqg16s1ydee4h3y", Request has 18444976265584.524596s seconds to be completed E0107 17:48:45.055761977 394797 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.055873208 394797 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.057728Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086072829810:2491][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7592678073187927342:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.057727Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7592678086072829811:2492][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7592678073187927342:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:48:45.064298Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.074623683 394797 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.074752280 394797 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.075866Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-07T17:48:45.084328Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kecs6gysamqg16s1ydee4h3y", Created new session, sessionId: ydb://session/3?node_id=1&id=MjI3ZDZhNzQtYmYyNDQxNWYtNTE0NDM4YTktNmQyOTdjYjk=, workerId: [1:7592678086072829825:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-07T17:48:45.084555Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kecs6gysamqg16s1ydee4h3y 2026-01-07T17:48:45.084636Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T17:48:45.084669Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T17:48:45.084695Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0107 17:48:45.086779786 394790 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.086887891 394790 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.087531Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0107 17:48:45.114987766 394790 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:48:45.115073658 394790 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:48:45.115977Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2) ... { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0107 17:49:26.539659332 414280 dns_resolver_ares.cc:452] no server name supplied in dns URI E0107 17:49:26.539817146 414280 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-07T17:49:26.541239Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-07T17:49:26.661834Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7592678260971391938:2337] TxId: 281474976710662. Ctx: { TraceId: 01kecs7tanbb7k93mbjahxaz7z, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njg5ZWEzNmMtZjM2YmNiNTUtOTYyMWZlMGYtOWEwZmVjZmY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# 2026-01-07T17:49:26.667875Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01kecs7tanbb7k93mbjahxaz7z, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njg5ZWEzNmMtZjM2YmNiNTUtOTYyMWZlMGYtOWEwZmVjZmY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678260971391942:2351] 2026-01-07T17:49:26.668014Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kecs7tanbb7k93mbjahxaz7z, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njg5ZWEzNmMtZjM2YmNiNTUtOTYyMWZlMGYtOWEwZmVjZmY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7592678260971391943:2352] 2026-01-07T17:49:26.669771Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kecs7tanbb7k93mbjahxaz7z", Forwarded response to sender actor, requestId: 4, sender: [9:7592678260971391837:2336], selfId: [9:7592678243791521959:2266], source: [9:7592678260971391838:2337] 2026-01-07T17:49:26.670442Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Njg5ZWEzNmMtZjM2YmNiNTUtOTYyMWZlMGYtOWEwZmVjZmY=, workerId: [9:7592678260971391838:2337], local sessions count: 0 >> KqpSinkLocks::InvalidateOnCommit |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2026-01-07T17:49:21.308425Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678239212634661:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:21.308481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:21.572190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:21.611265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:21.611403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:21.648832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:21.681183Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:21.753065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:22.032344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:22.032364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:22.032421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:22.032484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:22.315453Z 
node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:22.441452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:22.655671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:23.463554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678247802570345:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.463556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678247802570330:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.463622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678247802570346:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.463670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.464002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678247802570351:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.464087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:23.469129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:23.509004Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678247802570354:2634] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:49:23.512204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678247802570350:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:49:23.512219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678247802570349:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:49:23.583260Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678247802570400:2664] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:23.603202Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678247802570418:2672] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:24.627250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:49:24.913820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:25.217082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:49:25.475673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:49:25.842153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:49:26.308607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678239212634661:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.308672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:26.613714Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handshake: worker# [1:7592678243507602757:2492] 2026-01-07T17:49:26.619122Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Create read session: session# [1:7592678260687472901:2491] 2026-01-07T17:49:26.625400Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:49:26.692059Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_17110307651215378405_v1 } } 2026-01-07T17:49:26.725388Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2026-01-07T17:49:26.505000Z WriteTime: 2026-01-07T17:49:26.511000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-07T17:49:26.729146Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:49:27.625249Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592678260687472900:2945] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-07T17:49:27.516000Z WriteTime: 2026-01-07T17:49:27.528000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-07T17:49:27.625753Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7592678264982440332:2997] Handshake: worker# [1:7592678243507602757:2492] 2026-01-07T17:49:27.630709Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7592678264982440332:2997] Create read session: session# [1:7592678264982440333:2491] 2026-01-07T17:49:27.631038Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7592678264982440332:2997] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-07T17:49:27.654259Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7592678264982440332:2997] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_3355742356876196555_v1 } } 2026-01-07T17:49:27.657082Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7592678264982440332:2997] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-07T17:49:27.516000Z WriteTime: 2026-01-07T17:49:27.528000Z MessageGroupId: producer ProducerId: producer }] } } |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink |94.2%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::CommitRequired ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 18993, MsgBus: 25686 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:16.482428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:16.564712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:16.579229Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:16.579720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:16.579770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:16.799493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:16.799603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:16.855760Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808154460461 != 1767808154460465 2026-01-07T17:49:16.866343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:16.911927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:17.033336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:17.333299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:17.333382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:17.333428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:17.334084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:17.346237Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:17.556406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:17.608061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.815944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.130465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.376511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.965934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.966088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.966744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.966847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.988673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.178508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.424706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.646767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.937259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.170943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.430863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.712584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:21.078588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.078722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.079166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.079251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.079323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:21.084984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:21.238862Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:21.300484Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.516468Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037893 2026-01-07T17:49:22.516596Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037894 2026-01-07T17:49:22.516662Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186 ... Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2026-01-07T17:49:28.346072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-07T17:49:28.346099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-07T17:49:28.346125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-07T17:49:28.346159Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-07T17:49:28.346255Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-07T17:49:28.346503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3762:4925], Recipient [1:3179:4477]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-07T17:49:28.346682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3762:4925], Recipient [1:3181:4479]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-07T17:49:28.346890Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-07T17:49:28.346950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-07T17:49:28.347033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.347064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-07T17:49:28.347092Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-07T17:49:28.347122Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-07T17:49:28.347173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2026-01-07T17:49:28.347209Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.347235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-07T17:49:28.347260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-07T17:49:28.347285Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-07T17:49:28.347405Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-07T17:49:28.347556Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-07T17:49:28.347592Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3762:4925], 1} after executionsCount# 1 2026-01-07T17:49:28.347629Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3762:4925], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:49:28.347749Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3762:4925], 1} finished in read 2026-01-07T17:49:28.347807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.347835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-07T17:49:28.347861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-07T17:49:28.347888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-07T17:49:28.347929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.347952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-07T17:49:28.347977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-07T17:49:28.348005Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: 
TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-07T17:49:28.348077Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2026-01-07T17:49:28.348142Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-07T17:49:28.348185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-07T17:49:28.348241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.348270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-07T17:49:28.348293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-07T17:49:28.348317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-07T17:49:28.348352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-07T17:49:28.348385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.348411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-07T17:49:28.348433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-07T17:49:28.348460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-07T17:49:28.348574Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-07T17:49:28.348701Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-07T17:49:28.348734Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3762:4925], 2} after executionsCount# 1 2026-01-07T17:49:28.348768Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3762:4925], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:49:28.348875Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3762:4925], 2} finished in read 2026-01-07T17:49:28.352987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.353053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on 
unit ExecuteRead 2026-01-07T17:49:28.353085Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-07T17:49:28.353118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-07T17:49:28.353168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.353191Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-07T17:49:28.353215Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-07T17:49:28.353246Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-07T17:49:28.353327Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-07T17:49:28.355153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3762:4925], Recipient [1:3175:4475]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:49:28.355217Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-07T17:49:28.355474Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3762:4925], Recipient [1:3179:4477]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:49:28.355516Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-07T17:49:28.355566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3762:4925], Recipient [1:3181:4479]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-07T17:49:28.355597Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 12715, MsgBus: 5939 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:16.269541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:16.342827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:16.356316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:16.356610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:16.356655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:16.549483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:16.549562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:16.602971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808154329927 != 1767808154329931 2026-01-07T17:49:16.613522Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:16.657447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:16.771690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:17.091838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:17.091924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:17.091959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:17.092542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:17.103868Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:17.335000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:17.395350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.619248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:17.916556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.158761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.772896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.773062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.773671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.773733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:18.794119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:18.976472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.213743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.430492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.709946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:19.946481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.206138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.466722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:20.790310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.790421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.790868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.790956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.791032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:20.796198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:20.946632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:21.007844Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:22.378738Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037925 2026-01-07T17:49:22.378938Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2627:4044], Recipient [1:1390:3126]: {TEvReadSet step# 8001 txid# 281474976715672 TabletSource# 72075186224037899 ... ution status for [0:18] at 72075186224037927 is Executed 2026-01-07T17:49:28.395805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-07T17:49:28.395824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-07T17:49:28.395849Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-07T17:49:28.395921Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-07T17:49:28.396079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3760:4925], Recipient [1:3177:4477]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-07T17:49:28.396205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3760:4925], Recipient [1:3181:4481]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-07T17:49:28.396311Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-07T17:49:28.396352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-07T17:49:28.396408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.396430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-07T17:49:28.396452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-07T17:49:28.396474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute 
[0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-07T17:49:28.396505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2026-01-07T17:49:28.396531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.396551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-07T17:49:28.396569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-07T17:49:28.396589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-07T17:49:28.396677Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-07T17:49:28.396792Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-07T17:49:28.396821Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3760:4925], 1} after executionsCount# 1 2026-01-07T17:49:28.396850Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3760:4925], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:49:28.397812Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3760:4925], 1} finished in read 2026-01-07T17:49:28.397870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.397894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-07T17:49:28.397919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-07T17:49:28.397941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-07T17:49:28.397979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-07T17:49:28.398000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-07T17:49:28.398020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-07T17:49:28.398043Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-07T17:49:28.398108Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 
72075186224037928 2026-01-07T17:49:28.398159Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-07T17:49:28.398195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-07T17:49:28.398242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.398265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-07T17:49:28.398284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-07T17:49:28.398305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-07T17:49:28.398337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-07T17:49:28.398362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.398385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-07T17:49:28.398403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-07T17:49:28.398424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-07T17:49:28.398518Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-07T17:49:28.398630Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-07T17:49:28.398659Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3760:4925], 2} after executionsCount# 1 2026-01-07T17:49:28.398684Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3760:4925], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T17:49:28.398777Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3760:4925], 2} finished in read 2026-01-07T17:49:28.398821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.398842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2026-01-07T17:49:28.398863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-07T17:49:28.398886Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-07T17:49:28.398915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-07T17:49:28.398936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-07T17:49:28.398957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-07T17:49:28.398980Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-07T17:49:28.399031Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-07T17:49:28.400704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3760:4925], Recipient [1:3174:4475]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T17:49:28.400750Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-07T17:49:28.400801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3760:4925], Recipient [1:3177:4477]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-07T17:49:28.400829Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-07T17:49:28.401056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3760:4925], Recipient [1:3181:4481]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-07T17:49:28.401095Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } 2026-01-07T17:49:28.430229Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:2068:3605]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> KqpSinkLocks::VisibleUncommittedRowsUpdate >> KqpSinkTx::OlapDeferredEffects |94.3%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> KqpSinkLocks::VisibleUncommittedRows [GOOD] >> KqpTx::RollbackTx [GOOD] >> KqpSinkMvcc::TxReadsCommitted-IsOlap [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotIsolation::TSimpleOltp [GOOD] >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkLocks::UpdateLocksTwoShards >> KqpSnapshotIsolation::TSimpleOlap >> KqpSinkTx::InvalidateOnError >> KqpTx::RollbackTx2 >> KqpLocks::TwoPhaseTx >> KqpLocks::DifferentKeyUpdate >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpTx::ExplicitTcl [GOOD] >> KqpSinkLocks::TInvalidate [GOOD] >> KqpTx::InteractiveTx >> KqpSinkLocks::TInvalidateOlap >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink [GOOD] >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2026-01-07T17:42:34.971185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:34.971251Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:35.160577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:36.327420Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:36.327489Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:36.405404Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:41.406121Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:41.406207Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:41.526496Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:42.920342Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:42.920428Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:43.018546Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:44.519262Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:44.519340Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:44.746597Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:46.256576Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:46.256656Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:46.389270Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:48.254936Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:48.255010Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:48.438727Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:50.194956Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:50.195045Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:42:50.354622Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:52.002243Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:52.002330Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:52.140187Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:53.803766Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:53.803870Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:53.954527Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:56.334603Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:56.334696Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:56.451178Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:42:58.801527Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:42:58.801627Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:42:58.915888Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:01.207316Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:01.207396Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:01.310474Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:03.371672Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:03.371766Z node 19 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:03.508396Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:05.563633Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:05.563738Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:05.674076Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:07.591539Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:07.591616Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:07.668629Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:09.481540Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:09.481618Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:09.566939Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:11.384726Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:11.384804Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:11.452084Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:13.777311Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:13.777404Z node 24 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:13.872650Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:14.386597Z node 24 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1240: Unexpected config sender died for 
subscription id=1 2026-01-07T17:43:15.094925Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:15.095006Z node 25 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:15.176390Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:16.437600Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:16.437678Z node 26 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:16.511639Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:17.780522Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:43:17.780606Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:43:17.862477Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:43:24.334008Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:43:24.334121Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:31.341716Z node 27 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2026-01-07T17:47:32.313325Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:47:32.313405Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:47:32.391453Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:47:39.535700Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:47:39.535810Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:36.517511Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:49:36.517617Z node 29 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:36.622015Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> KqpSinkLocks::VisibleUncommittedRowsUpdate [GOOD] >> KqpSinkMvcc::DirtyReads+IsOlap >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx >> StatisticsSaveLoad::Simple [GOOD] >> StatisticsSaveLoad::Delete [GOOD] |94.3%| [TA] $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::RollbackTx2 [GOOD] >> KqpTx::SnapshotRO >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> StatisticsSaveLoad::ForbidAccess [GOOD] >> KqpLocksTricky::TestNoWrite [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert >> KqpSinkLocks::OlapVisibleUncommittedRows [GOOD] >> KqpSinkLocks::OlapUpdateLocksTwoShards >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:49:27.005783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:27.159746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:27.185986Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:27.186439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:27.186604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:27.758423Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.931755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:27.931884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.976414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.108445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:29.193715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.194668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:29.194716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:29.194809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:29.195878Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:29.257616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:29.810540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:49:33.653771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:33.663009Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:49:33.667392Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:33.700735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:49:33.700863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.746451Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:49:33.748458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.966373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:33.966473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.967780Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.968520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.969250Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.970174Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.970769Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.970983Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.971177Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.971363Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.971549Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.988272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.232300Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.306221Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:49:34.306335Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:49:34.341615Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:49:34.349749Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:49:34.349964Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:49:34.350023Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:49:34.350074Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:49:34.350123Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:49:34.350172Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:49:34.350227Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:49:34.351229Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:49:34.373435Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.373530Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.414961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:49:34.421011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:49:34.518346Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:49:34.519812Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:49:34.533226Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:49:34.533285Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:49:34.533365Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:49:34.539984Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:49:34.550399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.559148Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:49:34.559286Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:49:34.573695Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:49:34.893918Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:49:34.919809Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:49:35.348263Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:49:35.513641Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:49:35.513720Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:49:36.345380Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:36.348393Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2749:3375] Owner: [1:2748:3374]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:49:36.348479Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2749:3375] Owner: [1:2748:3374]. Column diff is empty, finishing 2026-01-07T17:49:36.350829Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2758:3378], ActorId: [1:2759:3379], Starting query actor #1 [1:2760:3380] 2026-01-07T17:49:36.350920Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2759:3379], ActorId: [1:2760:3380], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:36.385520Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2759:3379], ActorId: [1:2760:3380], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZThkZGFkOTMtOTE1MTM3NWYtZWVhMGIwNTAtMzJiMTFmMzY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:49:37.088902Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2780:3394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:37.093415Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:49:37.093529Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2782:3396] 2026-01-07T17:49:37.101043Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2782:3396] 2026-01-07T17:49:37.101902Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2783:2992] 2026-01-07T17:49:37.102223Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2782:3396], server id = [2:2783:2992], tablet id = 72075186224037894, status = OK 2026-01-07T17:49:37.102448Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2783:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:49:37.103831Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:49:37.104206Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:49:37.104354Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2780:3394], StatRequests.size() = 1 2026-01-07T17:49:37.705438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:49:38.024153Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2759:3379], ActorId: [1:2760:3380], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZThkZGFkOTMtOTE1MTM3NWYtZWVhMGIwNTAtMzJiMTFmMzY=, TxId: 2026-01-07T17:49:38.024235Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2759:3379], ActorId: [1:2760:3380], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZThkZGFkOTMtOTE1MTM3NWYtZWVhMGIwNTAtMzJiMTFmMzY=, TxId: 2026-01-07T17:49:38.024653Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2758:3378], ActorId: [1:2759:3379], Got response [1:2760:3380] SUCCESS 2026-01-07T17:49:38.025519Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2799:3401], ActorId: [1:2800:3402], Starting query actor #1 [1:2801:3403] 2026-01-07T17:49:38.025581Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [1:2800:3402], ActorId: [1:2801:3403], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:38.028434Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2800:3402], ActorId: [1:2801:3403], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MTA4YWUyMzAtYThjMGFhYzEtOTgzMzYwZWEtNDNhNzkyYmI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-07T17:49:38.065256Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2810:3412]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:38.065473Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:49:38.065524Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2810:3412], StatRequests.size() = 1 2026-01-07T17:49:38.443336Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2800:3402], ActorId: [1:2801:3403], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MTA4YWUyMzAtYThjMGFhYzEtOTgzMzYwZWEtNDNhNzkyYmI=, TxId: 01kecs865pdab96147d43czrqm 2026-01-07T17:49:38.445895Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2800:3402], ActorId: [1:2801:3403], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MTA4YWUyMzAtYThjMGFhYzEtOTgzMzYwZWEtNDNhNzkyYmI=, TxId: 01kecs865pdab96147d43czrqm 2026-01-07T17:49:38.446306Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2799:3401], ActorId: [1:2800:3402], Got response [1:2801:3403] SUCCESS 2026-01-07T17:49:38.448211Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2831:3422], ActorId: [1:2832:3423], Starting query actor #1 [1:2833:3424] 2026-01-07T17:49:38.448277Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2832:3423], ActorId: [1:2833:3424], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:38.451106Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2832:3423], ActorId: [1:2833:3424], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZjIyNzM5YTEtYTkxZTY1MTktYmM3YjYxN2EtZjFlZWM0MDU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-07T17:49:38.479532Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2832:3423], ActorId: [1:2833:3424], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZjIyNzM5YTEtYTkxZTY1MTktYmM3YjYxN2EtZjFlZWM0MDU=, TxId: 01kecs868kbc4xsbkag81nq629 2026-01-07T17:49:38.479654Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2832:3423], ActorId: [1:2833:3424], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZjIyNzM5YTEtYTkxZTY1MTktYmM3YjYxN2EtZjFlZWM0MDU=, TxId: 01kecs868kbc4xsbkag81nq629 2026-01-07T17:49:38.479901Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2831:3422], ActorId: [1:2832:3423], Got response [1:2833:3424] SUCCESS |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:49:27.059104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:27.163263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:27.185987Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:27.186477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:27.186628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:27.758489Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.920101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:27.920245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.961102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.056291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:29.197734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.198931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:29.198981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:29.199027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:29.199217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:29.268867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:29.824136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:49:33.542374Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:33.547083Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:49:33.550770Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:33.623386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:49:33.623529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.667568Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:49:33.672196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.907466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:33.907609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.929258Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.993162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:49:34.022097Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.022805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.023774Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.024417Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.024755Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.024892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.026958Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.027106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.027206Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:34.319467Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.384749Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:49:34.384871Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:49:34.432802Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.433007Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2151:2592], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.434395Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:49:34.434847Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:49:34.435113Z node 2 :STATISTICS 
DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:49:34.435186Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:49:34.435252Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:49:34.435312Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:49:34.435372Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:49:34.435437Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:49:34.436119Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:49:34.442876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1938:2461] 2026-01-07T17:49:34.471632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2194:2633] 2026-01-07T17:49:34.472916Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2194:2633], schemeshard id = 72075186224037897 2026-01-07T17:49:34.473568Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:49:34.514521Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Describe result: PathErrorUnknown 2026-01-07T17:49:34.514613Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Creating table 2026-01-07T17:49:34.514707Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:49:34.563352Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2241:2647], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:49:34.593591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.607857Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:49:34.608021Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Subscribe on create table tx: 281474976720657 2026-01-07T17:49:34.623294Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:49:34.821907Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:49:35.307805Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:49:35.429839Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:49:35.429970Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2197:2636] Owner: [2:2196:2635]. Column diff is empty, finishing 2026-01-07T17:49:36.219169Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:36.221871Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2723:3359] Owner: [1:2722:3358]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:49:36.221960Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2723:3359] Owner: [1:2722:3358]. Column diff is empty, finishing 2026-01-07T17:49:36.223737Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2732:3362], ActorId: [1:2733:3363], Starting query actor #1 [1:2734:3364] 2026-01-07T17:49:36.223830Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2733:3363], ActorId: [1:2734:3364], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:36.261303Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2733:3363], ActorId: [1:2734:3364], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NmUyNWYyNTQtYjlhMTlkNWItN2RkNjNjOGEtYzU1NTMwZWI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:49:37.092873Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2754:3378]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:37.093173Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:49:37.093274Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2756:3380] 2026-01-07T17:49:37.097023Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2756:3380] 2026-01-07T17:49:37.097921Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2757:2982] 2026-01-07T17:49:37.098319Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2756:3380], server id = [2:2757:2982], tablet id = 72075186224037894, status = OK 2026-01-07T17:49:37.098559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = 
[2:2757:2982], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:49:37.105625Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:49:37.106129Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:49:37.106232Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2754:3378], StatRequests.size() = 1 2026-01-07T17:49:37.703555Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:49:38.030645Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2733:3363], ActorId: [1:2734:3364], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NmUyNWYyNTQtYjlhMTlkNWItN2RkNjNjOGEtYzU1NTMwZWI=, TxId: 2026-01-07T17:49:38.030745Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2733:3363], ActorId: [1:2734:3364], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NmUyNWYyNTQtYjlhMTlkNWItN2RkNjNjOGEtYzU1NTMwZWI=, TxId: 2026-01-07T17:49:38.031184Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2732:3362], ActorId: [1:2733:3363], Got response [1:2734:3364] SUCCESS 2026-01-07T17:49:38.032131Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2773:3385], ActorId: [1:2774:3386], Starting query actor #1 [1:2775:3387] 2026-01-07T17:49:38.032209Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2774:3386], ActorId: [1:2775:3387], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:38.035585Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2774:3386], ActorId: [1:2775:3387], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=OTczYjZmNTAtOTMwN2VmMDMtZmE1MTAwMzEtMTk1NTIwNjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-07T17:49:38.068647Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2784:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:38.068869Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:49:38.068922Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2784:3396], StatRequests.size() = 1 2026-01-07T17:49:38.476094Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2774:3386], ActorId: [1:2775:3387], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=OTczYjZmNTAtOTMwN2VmMDMtZmE1MTAwMzEtMTk1NTIwNjI=, TxId: 2026-01-07T17:49:38.476175Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2774:3386], ActorId: [1:2775:3387], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=OTczYjZmNTAtOTMwN2VmMDMtZmE1MTAwMzEtMTk1NTIwNjI=, TxId: 2026-01-07T17:49:38.476549Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2773:3385], ActorId: [1:2774:3386], Got response [1:2775:3387] SUCCESS 2026-01-07T17:49:38.477687Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2806:3401], ActorId: [1:2807:3402], Starting 
query actor #1 [1:2808:3403] 2026-01-07T17:49:38.477763Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2807:3402], ActorId: [1:2808:3403], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:49:38.481225Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2807:3402], ActorId: [1:2808:3403], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=Nzc0OTc4MDAtYjM4ZjA4M2MtMmJjZWNkZjYtYTI5MzVhNDY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-07T17:49:38.508124Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2817:3412]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:38.508333Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:49:38.508372Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:2817:3412], StatRequests.size() = 1 2026-01-07T17:49:38.645615Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2807:3402], ActorId: [1:2808:3403], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Nzc0OTc4MDAtYjM4ZjA4M2MtMmJjZWNkZjYtYTI5MzVhNDY=, TxId: 01kecs86e13pdyhnfe41sykvyy 2026-01-07T17:49:38.645789Z node 1 :STATISTICS WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:2807:3402], ActorId: [1:2808:3403], Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=Nzc0OTc4MDAtYjM4ZjA4M2MtMmJjZWNkZjYtYTI5MzVhNDY=, TxId: 01kecs86e13pdyhnfe41sykvyy 2026-01-07T17:49:38.646061Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2806:3401], ActorId: [1:2807:3402], Got response [1:2808:3403] BAD_REQUEST >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkLocks::EmptyRange |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpSinkLocks::UpdateLocksTwoShards [GOOD] >> KqpSinkLocks::UpdateLocksOneShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:57.117689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:57.117791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:57.117831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:57.117867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:57.117899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:57.117927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:57.117976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:57.118033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:57.118840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:57.121727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:57.230662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:57.230717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:57.247005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2026-01-07T17:48:57.247304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:57.250112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:57.256886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:57.258155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:57.261884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:57.265254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:57.272251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:57.273921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:57.280069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:57.280131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:57.280213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:57.280247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:57.280384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:57.281215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:57.405949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.407978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.408016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.408069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.408113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.408188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.408309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1120: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2026-01-07T17:49:34.323206Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2026-01-07T17:49:34.323253Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2026-01-07T17:49:34.323318Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-07T17:49:34.323343Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-07T17:49:34.323371Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-07T17:49:34.323395Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-07T17:49:34.323422Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2026-01-07T17:49:34.324020Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.324213Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.324284Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-07T17:49:34.324361Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], version: 5 2026-01-07T17:49:34.324432Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 4 2026-01-07T17:49:34.329450Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.329567Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.329599Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-07T17:49:34.329634Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 5 
2026-01-07T17:49:34.329695Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 6 2026-01-07T17:49:34.329824Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2026-01-07T17:49:34.335335Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.335749Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-07T17:49:34.350177Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1328 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-07T17:49:34.350258Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-07T17:49:34.350460Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1328 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-07T17:49:34.350633Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1328 } } CommitVersion { Step: 400 TxId: 281474976715657 } debug: NTableState::TProposedWaitParts operationId# 281474976715657:1 2026-01-07T17:49:34.352303Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 1295 RawX2: 85899349102 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-07T17:49:34.352387Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-07T17:49:34.352641Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 1295 RawX2: 85899349102 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-07T17:49:34.352748Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts 
operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2026-01-07T17:49:34.353003Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 1295 RawX2: 85899349102 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-07T17:49:34.353136Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2026-01-07T17:49:34.353210Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-07T17:49:34.353279Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2026-01-07T17:49:34.353353Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:1 129 -> 240 2026-01-07T17:49:34.356864Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-07T17:49:34.359335Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-07T17:49:34.359867Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-07T17:49:34.359946Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2026-01-07T17:49:34.360154Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-07T17:49:34.360210Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-07T17:49:34.360283Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-07T17:49:34.360363Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-07T17:49:34.360463Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2026-01-07T17:49:34.360532Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-07T17:49:34.360598Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-07T17:49:34.360657Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:0 2026-01-07T17:49:34.360758Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 3 2026-01-07T17:49:34.360809Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:1 2026-01-07T17:49:34.360841Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:1 2026-01-07T17:49:34.360957Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 5 2026-01-07T17:49:34.361002Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:2 2026-01-07T17:49:34.361026Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:2 2026-01-07T17:49:34.361058Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkTx::Interactive >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OltpNamedStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:49:27.105344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:27.211309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:27.220049Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:27.220444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:27.220599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:27.770130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.913519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:27.913670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.970531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.119480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:29.193780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.194810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:29.194855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:29.194898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:29.195326Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:29.274079Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:29.851558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:49:33.615874Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:33.624473Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:49:33.628436Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:33.661242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:49:33.661396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.709867Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:49:33.712763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.917387Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:33.917558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.918953Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.928153Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.930406Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.931173Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.931619Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.931798Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.932151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.932401Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.932592Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:49:33.949458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.223873Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.291541Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:49:34.291641Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:49:34.343743Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:49:34.345217Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:49:34.345447Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:49:34.345526Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:49:34.345587Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:49:34.345646Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:49:34.345700Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:49:34.345767Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:49:34.346681Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:49:34.398266Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.398372Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:49:34.438996Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:49:34.440113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:49:34.516983Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:49:34.518762Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:49:34.535023Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:49:34.535084Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:49:34.535168Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:49:34.542465Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:49:34.547179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.563976Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:49:34.564143Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:49:34.579183Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:49:34.853074Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:49:34.873866Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:49:35.343858Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:49:35.481689Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:49:35.481798Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:49:36.341568Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:36.836264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2750:3376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.836587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.838555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2768:3381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.838703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.957128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.714997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3056:3429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.715333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.715897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3060:3432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.715997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.717199Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3063:3435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:37.717385Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:49:37.717466Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3065:3437] 2026-01-07T17:49:37.717595Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3065:3437] 2026-01-07T17:49:37.718270Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3066:3188] 2026-01-07T17:49:37.718569Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3065:3437], server id = [2:3066:3188], tablet id = 72075186224037894, status = OK 2026-01-07T17:49:37.718757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3066:3188], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:49:37.718838Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-07T17:49:37.719105Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-07T17:49:37.719184Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3063:3435], StatRequests.size() = 1 2026-01-07T17:49:37.719450Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:49:37.754735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3070:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.754864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.755557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3074:3445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.755686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.755797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3077:3448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.766849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:37.920952Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T17:49:37.921046Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T17:49:38.009393Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3065:3437], schemeshard count = 1 2026-01-07T17:49:38.302881Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3079:3450], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:49:38.439109Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3183:3517] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:38.454606Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3206:3533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:49:38.454786Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:49:38.454826Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3206:3533], StatRequests.size() = 1 2026-01-07T17:49:38.908012Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3284:3563], for# user@builtin, access# DescribeSchema 2026-01-07T17:49:38.908085Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3284:3563], for# user@builtin, access# DescribeSchema 2026-01-07T17:49:38.964580Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:3274:3559], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:49:38.967202Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTYzYzA3ZWItNTkzMDEzN2EtNWZlYWVkMDQtNjk5YjE0ZDg=, ActorId: [1:3265:3551], ActorState: ExecuteState, LegacyTraceId: 01kecs86n5cn07dysyzbceyw6q, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/Database/.metadata/_statistics]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::SnapshotROInteractive2 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkMvcc::WriteSkewUpsert+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap >> KqpSinkMvcc::WriteSkewInsert+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap |94.3%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 >> KqpSnapshotIsolation::TReadOnlyOltp >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> KqpSinkMvcc::DirtyReads-IsOlap >> KqpSinkMvcc::TxReadsCommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap >> KqpSinkMvcc::LostUpdate+IsOlap [GOOD] >> KqpSinkMvcc::LostUpdate-IsOlap >> KqpTx::DeferredEffects >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpTx::RollbackRoTx [GOOD] >> KqpTx::RollbackInvalidated >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation >> KqpTx::SnapshotRO [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken >> KqpTx::InvalidateOnError [GOOD] >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpLocks::MixedTxFail-useSink >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap >> KqpSinkLocks::UpdateLocksOneShard [GOOD] >> KqpSinkLocks::UpdateLocksOneShardRowExists >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 12851, MsgBus: 17507 2026-01-07T17:49:26.420221Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262305643376:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.422012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.656240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.782560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.782684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.861031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.861962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678262305643261:2081] 1767808166407923 != 1767808166407926 
2026-01-07T17:49:26.875874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.927311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.173545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.173564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.428860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.938535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.948898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:27.987592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.134318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.280847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.357613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.526178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275190547016:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.526295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.526589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275190547026:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.526645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.040530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.074793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.099362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.123131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.155768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.198851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.244238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.306401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.419875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678279485515192:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.419944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.420337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678279485515197:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.420370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678279485515198:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.420464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.426673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.440108Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678279485515201:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:30.530634Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678279485515256:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.419530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678262305643376:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.419604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.101328Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzUwZDdjOWMtZDNkMzRmM2MtNDBkMzA0OWYtYTc ... undelivered_message; 2026-01-07T17:49:40.957012Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:41.037677Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:41.038633Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678322252487578:2081] 1767808180837230 != 1767808180837233 2026-01-07T17:49:41.061000Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:41.061078Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:41.074633Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:41.137008Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:41.153457Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:41.153475Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:41.153483Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:41.153549Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:41.682319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:41.693629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:41.711788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:41.801168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:41.944563Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:41.996701Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:42.069659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.346445Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678339432358622:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.346529Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.346771Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678339432358632:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.346819Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.416456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.451799Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.482501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.519663Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.562987Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.602442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.645928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.705203Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.792856Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678339432359505:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.792951Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.793207Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678339432359510:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.793235Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678339432359511:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.793271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:44.796749Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:44.811602Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678339432359514:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:44.887088Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678339432359565:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:45.843385Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678322252487605:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:45.843442Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:47.191388Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Mzg1ZmVlYTMtOTYwMjJjZjgtOTM4MmQ2MmMtNzAwYzIxYzE=, ActorId: [3:7592678348022294454:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs8eq50702xf8mknrnnymp, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 25 } issue_code: 2008 severity: 1 } trace_id#
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpTx::CommitRoTx_TLI [GOOD] >> KqpSinkTx::Interactive [GOOD] >> KqpSinkTx::SnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 9620, MsgBus: 20932 2026-01-07T17:49:28.062082Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678269991971576:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:28.062206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:28.366532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:28.366651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:28.405789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.453099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:28.506116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:28.509873Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678269991971550:2081] 1767808168060768 != 1767808168060771 2026-01-07T17:49:28.609642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:28.609672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:28.609685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:28.609847Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:28.692135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:29.015749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.023315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.073662Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:29.079322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.223024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.356069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.415859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.948154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678278581908016:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.948241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.948524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678278581908026:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.948579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.334628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.373430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.407287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.448990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.496161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.556756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.605926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.680177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.774834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282876876196:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.774935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.778369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282876876201:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.778421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282876876202:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.778489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.782280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:31.795197Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678282876876205:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:31.880907Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678282876876256:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:33.062201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678269991971576:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:33.062261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.921216Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTc4NjA3MTItNzg1OTBhMjItOGM3NzMyZWYtYmYy ... HARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.007020Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:43.036054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.280905Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.364719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.849061Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678340364450325:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.849178Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.849668Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678340364450335:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.849725Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.931621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.964770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.997085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.032848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.068723Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.114700Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.158540Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.216437Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.309035Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344659418500:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.309107Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.309162Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344659418505:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.309318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344659418507:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.309371Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.313243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.329134Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678344659418508:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:46.387997Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678344659418560:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:48.197664Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:49:48.197882Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:49:48.198019Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:49:48.198195Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7592678353249353473:2531], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [3:7592678353249353448:2531]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[3:7592678353249353473:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:49:48.198647Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678353249353466:2531], SessionActorId: [3:7592678353249353448:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7592678353249353448:2531]. 2026-01-07T17:49:48.198885Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZmQxZjdiMDMtM2JlNWYxZGItNzdmMjUzMmEtYWQ4ZWMyYWU=, ActorId: [3:7592678353249353448:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs8fpsac5ft931jrxkknkx, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7592678353249353468:2531] from: [3:7592678353249353466:2531] trace_id# 2026-01-07T17:49:48.198999Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678353249353468:2531] TxId: 281474976715673. Ctx: { TraceId: 01kecs8fpsac5ft931jrxkknkx, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQxZjdiMDMtM2JlNWYxZGItNzdmMjUzMmEtYWQ4ZWMyYWU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:49:48.199235Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZmQxZjdiMDMtM2JlNWYxZGItNzdmMjUzMmEtYWQ4ZWMyYWU=, ActorId: [3:7592678353249353448:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs8fpsac5ft931jrxkknkx, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KeyValue`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-07T17:49:48.323733Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZmQxZjdiMDMtM2JlNWYxZGItNzdmMjUzMmEtYWQ4ZWMyYWU=, ActorId: [3:7592678353249353448:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs8fsp1vkcefy8nwky7b2w, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecs8fp8f5whegfxk9x2bj23" issue_code: 2015 severity: 1 } trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 30149, MsgBus: 22825 2026-01-07T17:49:28.040161Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678267265663330:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:28.040550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:28.317531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:28.317657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:28.332659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.360798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:28.386613Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:28.393052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678267265663219:2081] 1767808168015032 != 1767808168015035 2026-01-07T17:49:28.490478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:28.490505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:28.490625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:28.490827Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:28.591370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:28.916867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:28.979889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.050588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:29.102383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.244855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.306405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.210928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678280150566984:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.211039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.211406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678280150566994:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.211462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.598178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.636837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.700431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.743826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.785602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.827180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.907237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.962935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.089278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678284445535173:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.089351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.089796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678284445535178:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.089838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678284445535179:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.090103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.094263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:32.105533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:49:32.105742Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678284445535182:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:32.172864Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678284445535233:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:33.041420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678267265663330:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:33.041493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.752487Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MWQ1ODUwNDgtOTNhMzBhYmUtNGYxYjAzNjYtMmI ... HARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:38.723527Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592678312933408660:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:38.780367Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592678312933408711:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 63536, MsgBus: 10807 2026-01-07T17:49:42.069012Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:42.069179Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:42.150246Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:42.150329Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:42.151757Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:42.177520Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:42.260881Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:42.260909Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:42.260917Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:42.261006Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:42.369006Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:42.738234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:42.749678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:42.771320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:42.842821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.032427Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:43.046567Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.111603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.497111Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344271453591:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.497182Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.497482Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344271453600:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.497517Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.572027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.609292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.645001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.679611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.725219Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.779543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.819497Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.867113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.954589Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344271454471:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.954711Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.955684Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344271454476:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.955758Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344271454477:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.955890Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.959611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:45.970844Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678344271454480:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:46.073475Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678348566421827:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewUpsert-IsOlap [GOOD] >> KqpSinkTx::DeferredEffects >> KqpSnapshotIsolation::TReadOnlyOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 11525, MsgBus: 21452 2026-01-07T17:49:30.665565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678276054935426:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:30.665626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:30.958886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:30.960808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:31.007049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:31.029262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:31.033147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678276054935307:2081] 1767808170659852 != 1767808170659855 2026-01-07T17:49:31.039369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:31.197845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:31.197864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:31.197869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:31.197946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:31.281018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:31.670736Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:31.704005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:31.713843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:31.771660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.955726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.120213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.204511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.958630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678288939839062:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.958726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.959326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678288939839072:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.959372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.355258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.396153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.436247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.478903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.507512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.537207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.581261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.673088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.773208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678293234807247:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.773291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.773561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678293234807252:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.773590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678293234807253:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.773616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.781198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:34.792265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:49:34.792736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678293234807256:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:34.888799Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678293234807307:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:35.669031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678276054935426:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:35.670368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migra ... DATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16026, MsgBus: 6576 2026-01-07T17:49:43.557493Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678334681327179:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:43.557745Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:43.594126Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:43.714131Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:43.715737Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678334681327061:2081] 1767808183545190 != 1767808183545193 2026-01-07T17:49:43.729348Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:43.729420Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:43.747788Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:43.819834Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:43.837658Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:43.837676Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:43.837683Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:43.837751Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:44.180160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:44.184967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:44.189098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.248922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.390602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.448632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:44.568268Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:46.601058Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678347566230812:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.601169Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.605785Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678347566230822:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.605895Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.670003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.703174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.757069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.795050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.830262Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.911802Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.959304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.019550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.113281Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678351861198989:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.113363Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.113719Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678351861198995:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.113773Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678351861198994:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.113818Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.117911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:47.133080Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678351861198998:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:47.222355Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678351861199049:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:48.553026Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678334681327179:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:48.553098Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 4599, MsgBus: 13576 2026-01-07T17:49:26.428917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262148780083:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.430296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.757121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.760524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.770618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.859447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.925309Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.086929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.169551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.169571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.169583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.169665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.438249Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:49:28.004228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.654814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275033682792:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.654815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275033682802:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.654913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.655496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275033682822:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.655574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.658917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.671685Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275033682821:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.779866Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275033682874:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.250859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.250053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.011138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678262148780083:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.122094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.560311Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzQzMjUxYjctNWRlMjMzOTgtZTFhZmE5NmItYmViYTdmZTE=, ActorId: [1:7592678287918592669:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs81dj9901b83ykny8s9hw, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 29 } issue_code: 2008 severity: 1 } trace_id#
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 4334, MsgBus: 8941 2026-01-07T17:49:34.719515Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678295233483667:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.719566Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.777053Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.876384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.876610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.911933Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.918507Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:35.125440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:35.125458Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:35.125464Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:35.125525Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:35.133015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:35.646304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.653024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.725432Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:38.287133Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312413353655:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.287200Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312413353675:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.287325Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.290715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:38.293383Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312413353687:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.293521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.307078Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592678312413353686:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:38.405229Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592678312413353739:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:38.459096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:38.510822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:39.660529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:39.721055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678295233483667:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:39.721123Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13411, MsgBus: 17145 2026-01-07T17:49:42.708312Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678330645652250:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:42.708358Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:42.732269Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:42.856154Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:42.876509Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678330645652226:2081] 1767808182706248 != 1767808182706251 2026-01-07T17:49:42.876613Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:42.902058Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:42.902136Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:49:42.910596Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:43.069494Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:43.069518Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:43.069525Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:43.069602Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:43.174960Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:43.717087Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:43.745051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:43.750412Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:46.223365Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678347825522273:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.223513Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.223886Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678347825522300:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.228643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.253392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T17:49:46.253564Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678347825522302:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:46.328274Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678347825522364:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:46.405492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.466999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.467849Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.302072Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678330645652250:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:48.432683Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 28843, MsgBus: 17466 2026-01-07T17:49:26.392575Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262341985360:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.392883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.762592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.788355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.792210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.842761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.854963Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.861102Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678262341985244:2081] 1767808166378832 != 1767808166378835 2026-01-07T17:49:26.939368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.177026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.177042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.177055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.177124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.405013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.859965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.712744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275226888023:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.712752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275226888034:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.712827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.713045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275226888038:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.713083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.715958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.724639Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275226888037:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.835144Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275226888090:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.275460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.387040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678262341985360:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.387121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:31.486074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.383846Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTZkNjUwMjMtM2UwODYzY2EtZTZhNDJlYjYtYTRkZmVkNTM=, ActorId: [1:7592678292406765165:2964], ActorState: ReadyState, LegacyTraceId: 01kecs819xcw02jypeyhs3dm4k, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecs81049ehygcyqqpkyj28z" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 6412, MsgBus: 3688 2026-01-07T17:49:34.610284Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678295410989654:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.617386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.694988Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.782191Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.794189Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.794271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.826872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.949908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:34.949927Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:34.949935Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:34.950003Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:34.957200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:35.396717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.405986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.644289Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:38.165324Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312590859646:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.173225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.173923Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312590859674:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.173993Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312590859675:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.174028Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.178274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operati ... n.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:39.411663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:39.953204Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678295410989654:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:40.302109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:40.898425Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:49:40.898638Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:49:40.898786Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:49:40.898970Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7592678321180802292:2962], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [2:7592678321180802269:2962]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7592678321180802292:2962].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:49:40.899460Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7592678321180802286:2962], SessionActorId: [2:7592678321180802269:2962], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7592678321180802269:2962]. 2026-01-07T17:49:40.899712Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=MjU5N2E0ZDQtNTk2NDc1MjYtZjFkNWIwOWYtOTI4ZTc5MDQ=, ActorId: [2:7592678321180802269:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs88k1a3c04d3rxrde183x, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7592678321180802287:2962] from: [2:7592678321180802286:2962] trace_id# 2026-01-07T17:49:40.899837Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7592678321180802287:2962] TxId: 281474976715664. Ctx: { TraceId: 01kecs88k1a3c04d3rxrde183x, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjU5N2E0ZDQtNTk2NDc1MjYtZjFkNWIwOWYtOTI4ZTc5MDQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:49:40.900107Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MjU5N2E0ZDQtNTk2NDc1MjYtZjFkNWIwOWYtOTI4ZTc5MDQ=, ActorId: [2:7592678321180802269:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs88k1a3c04d3rxrde183x, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KV`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 2026-01-07T17:49:40.973279Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MjU5N2E0ZDQtNTk2NDc1MjYtZjFkNWIwOWYtOTI4ZTc5MDQ=, ActorId: [2:7592678321180802269:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs88nb8vr45r89my41k4nd, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecs88jtdng2ratepxnbz94c" issue_code: 2015 severity: 1 } trace_id#
: Error: Transaction not found: 01kecs88jtdng2ratepxnbz94c, code: 2015 Trying to start YDB, gRPC: 4181, MsgBus: 17363 2026-01-07T17:49:42.511000Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678327672446906:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:42.513260Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:42.597119Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:42.725054Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:42.729097Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678327672446883:2081] 1767808182501845 != 1767808182501848 2026-01-07T17:49:42.809499Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:42.809599Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:42.813796Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:42.861407Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:42.968739Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:42.968764Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:42.968770Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:42.968851Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:43.541206Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:43.608996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:43.625281Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:46.221903Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344852316944:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.221998Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.222052Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344852316963:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.225166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.231172Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678344852316965:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.231347Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.243233Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678344852316966:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:46.299549Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678344852317018:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:46.351410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.398268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.527797Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678327672446906:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:47.531713Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:47.608251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink [GOOD] >> KqpRollback::DoubleUpdate >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink >> KqpSinkMvcc::LostUpdate-IsOlap [GOOD] >> KqpSinkMvcc::OlapMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 23626, MsgBus: 13759 2026-01-07T17:49:26.534811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678260644994316:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.534858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.872510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.918836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.918978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-07T17:49:27.003815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.054194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.141019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.172017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.172037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.172047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.172123Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.553055Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.852149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.859484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:27.939725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.089826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.237053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.313043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.924142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273529897939:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.924259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.924600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273529897949:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.924658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.197319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.235652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.334074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.371100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.414013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.452102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.502027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.580677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.688941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277824866123:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.689058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.689611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277824866129:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.689656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277824866128:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.689689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.692833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.702930Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678277824866132:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:30.784601Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678277824866183:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.537046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678260644994316:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.537124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.264221Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715675; 2026-01-07T17:49:33.277754Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7592678290709768446:2533], Table: `/Root/TwoShard` ([72057594046644480 ... node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:42.229547Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:42.229558Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:42.229624Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:42.263197Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:42.818201Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:42.830590Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:42.837822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:42.871720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:42.997902Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.169189Z node 3 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:43.240718Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.614197Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678341087197081:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.614301Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.621075Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678341087197091:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.621185Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:45.688693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.735727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.766659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.796600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.837607Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.878606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.921413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:45.976503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.061324Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678345382165257:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.061411Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.061673Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678345382165262:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.061726Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678345382165263:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.061770Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.066186Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.078524Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678345382165266:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:46.151780Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678345382165317:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:46.817127Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678323907326265:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:46.817196Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:50.557141Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678362562034897:2532], SessionActorId: [3:7592678349677132908:2532], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 . sessionActorId=[3:7592678349677132908:2532]. 2026-01-07T17:49:50.557359Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Y2E5MWE5ZjItNDhiOTQ3N2YtNzc5ZjhjYTYtODUwN2U5YWM=, ActorId: [3:7592678349677132908:2532], ActorState: ExecuteState, LegacyTraceId: 01kecs8hss19by84448hhkn62w, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678362562034898:2532] from: [3:7592678362562034897:2532] trace_id# 2026-01-07T17:49:50.557438Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678362562034898:2532] TxId: 281474976715675. Ctx: { TraceId: 01kecs8hss19by84448hhkn62w, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2E5MWE5ZjItNDhiOTQ3N2YtNzc5ZjhjYTYtODUwN2U5YWM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 } trace_id# 2026-01-07T17:49:50.557678Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Y2E5MWE5ZjItNDhiOTQ3N2YtNzc5ZjhjYTYtODUwN2U5YWM=, ActorId: [3:7592678349677132908:2532], ActorState: ExecuteState, LegacyTraceId: 01kecs8hss19by84448hhkn62w, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/EightShard`" issue_code: 2001 severity: 1 } trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::DirtyReads-IsOlap [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> KqpSnapshotRead::TestReadOnly+withSink >> KqpTx::RollbackInvalidated [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 17413, MsgBus: 27355 2026-01-07T17:49:28.513356Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678269915407545:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:28.517055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:28.764461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:28.764615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:28.818981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:28.819051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:28.821284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.931319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:28.931350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:28.931364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:28.931477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:29.080447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:29.326831Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.529160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:31.488550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282800310254:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.488680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.489386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282800310288:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.489425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678282800310289:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.489616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.493826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:31.519882Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678282800310293:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:31.628625Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678282800310346:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:32.003623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:32.186901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:32.187142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:32.187347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:32.187451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:32.187551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:32.187642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:32.187740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:32.187859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:32.187947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:32.188063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:32.188158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:32.188243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:32.188356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678287095277799:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:32.198880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:32.199006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:32.199189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:32.199297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:32.199407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:32.199504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:32.199611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:32.199719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:32.199799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:32.199921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678287095277805:2340];tablet_id=72075186224037896;process=TTxIni ... x_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.173429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.176533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.176577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.176591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.181455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.181512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.181525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.184153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.184196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.184209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.189271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.189324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.189345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.191249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.191293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.191305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.196617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.196671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:40.196684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; Trying to start YDB, gRPC: 28237, MsgBus: 4058 2026-01-07T17:49:44.456193Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678336295860208:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:44.456240Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:44.473427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:44.569072Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678336295860180:2081] 1767808184455601 != 1767808184455604 2026-01-07T17:49:44.573416Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:44.597257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:44.597334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:44.603869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:44.685507Z node 2 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:44.685528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:44.685538Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:44.685614Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:44.688991Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:45.102391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:45.464636Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:47.785467Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678349180762948:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.785588Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.785896Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678349180762960:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.785932Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678349180762961:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.785970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.789842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:47.802058Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592678349180762964:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:47.859942Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592678349180763015:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:47.937571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.024356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.167835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.873049Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678336295860208:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:49.931729Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.253096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.253217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.253260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.253300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-07T17:48:32.253359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.253393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.253449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.254861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.255790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.257057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.357976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.358051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.377002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.377366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.380775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.390945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.394026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.396047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.400785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.410929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.412258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.421876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.421976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.422157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.422219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-07T17:48:32.422426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.422677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:32.613916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.614987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:48:32.615075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.615409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Reads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5648 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 132 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-07T17:49:53.162956Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:49:53.163042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.5648 2026-01-07T17:49:53.163186Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:49:53.163241Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:49:53.205423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:49:53.205510Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:49:53.205565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-07T17:49:53.205651Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-07T17:49:53.205695Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-07T17:49:53.205864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-07T17:49:53.205947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-07T17:49:53.206000Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-07T17:49:53.206153Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-07T17:49:53.206314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2026-01-07T17:49:53.206369Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:227: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-07T17:49:53.206454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-07T17:49:53.206556Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:49:53.206924Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:718:2703]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-07T17:49:53.207113Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-07T17:49:53.208518Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T17:49:53.208595Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2026-01-07T17:49:53.221351Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:708:2695], Recipient [3:718:2703]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:49:53.228969Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.243000Z 2026-01-07T17:49:53.229070Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2026-01-07T17:49:53.229118Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:49:53.229503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:718:2703], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-07T17:49:53.229551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: 
StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-07T17:49:53.229677Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-07T17:49:53.229777Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 29.997000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2026-01-07T17:49:53.230412Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:718:2703]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-07T17:49:53.230549Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-07T17:49:53.232221Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.243000Z 2026-01-07T17:49:53.232265Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2026-01-07T17:49:53.249588Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:708:2695], Recipient [3:718:2703]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:49:53.255628Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.243000Z 2026-01-07T17:49:53.256667Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:708:2695], Recipient [3:718:2703]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:49:53.275949Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.247000Z 2026-01-07T17:49:53.276053Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2026-01-07T17:49:53.276095Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T17:49:53.276849Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:718:2703], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-07T17:49:53.276899Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5324: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-07T17:49:53.277007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2026-01-07T17:49:53.277076Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.993000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-07T17:49:53.286189Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:708:2695], Recipient [3:718:2703]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T17:49:53.301442Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:49:53.301531Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:49:53.301563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:49:53.327605Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.247000Z |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 13804, MsgBus: 20179 2026-01-07T17:49:26.458972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678258690145491:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.459044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.860602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.860763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.862534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.943499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.967239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.138317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.173568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-07T17:49:27.173589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173669Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.477255Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.847912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.858240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:27.910455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.140009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.272559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.334543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.810194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678271575049098:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.810293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.810605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678271575049108:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.810647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.135260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.169779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.204389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.235905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.270611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.329280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.378335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.457786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.534841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275870017279:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.534920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.535318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275870017284:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.535353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275870017285:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.535637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.539229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.551405Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275870017288:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:30.615239Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275870017339:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.459097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678258690145491:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.459139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9673, MsgBus: 19302 2026-01-07T17:49:39.953636Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:39.953905Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists ... d, will use file: (empty maybe) 2026-01-07T17:49:46.961498Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:46.961505Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:46.961576Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:46.980380Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:47.462379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:47.474859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:47.487680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.551659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.694454Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:47.778105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.888087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.206231Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362693182329:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.206314Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.206729Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362693182339:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.206772Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.306277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.350266Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.392163Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.432377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.475356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.528044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.568687Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.625420Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.720337Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362693183214:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.720436Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.720882Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362693183219:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.720945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362693183220:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.721191Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.725014Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:50.735239Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678362693183223:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:50.834373Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678362693183276:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:51.689035Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678345513311342:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:51.689117Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:52.691655Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592678371283118186:2541], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:49:52.692112Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZWM3NzJiNjEtYjRmODIwMjMtNTQwYTRiNTMtNjU4ZmQ4YjU=, ActorId: [3:7592678371283118164:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs8m4p1b9mc31sg7b51a2x, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/BadTable]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# 01kecs8m4444pttx427x7vs147 trace_id# 2026-01-07T17:49:52.714687Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWM3NzJiNjEtYjRmODIwMjMtNTQwYTRiNTMtNjU4ZmQ4YjU=, ActorId: [3:7592678371283118164:2531], ActorState: ReadyState, LegacyTraceId: 01kecs8m6a769bvzx8rmst1gjq, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecs8m4444pttx427x7vs147" issue_code: 2015 severity: 1 } trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewInsert-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 32185, MsgBus: 17844 2026-01-07T17:49:28.374386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678270314600513:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:28.374431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:28.452216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:28.695168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:28.695248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:28.715730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.782101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:28.782797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:28.784566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678270314600470:2081] 1767808168365985 != 1767808168365988 2026-01-07T17:49:28.858352Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:28.858368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:28.858374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:28.858441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:29.083951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:29.257162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.385724Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:31.457091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678283199503247:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.457293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.457968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678283199503259:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.458025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678283199503260:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.458065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.459637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678283199503264:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.459708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:31.465637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:31.484039Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678283199503263:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:31.603328Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678283199503316:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:32.062259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.169528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.374670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678270314600513:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:33.374760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.403376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.418105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.165348Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YWQ4M2U5Y2YtODA4Y2UzYjktOGI1YWRhNjYtODMxYjVjOWI=, ActorId: [1:7592678296084413156:2963], ActorState: ExecuteState, LegacyTraceId: 01kecs83cney6vgcatd7tm6zzr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable/IndexUniq/indexImplTable`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 17346, MsgBus: 6019 2026-01-07T17:49:37.395624Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678308612555369:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:37.409074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:37.429211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:37.495268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:37.495352Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:37.499812Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:37.505099Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678308612555249:2081] 1767808177391438 != 1767808177391441 2026-01-07T17:49:37.517815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:37.645254Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:37.725054Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:37.725075Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:37.725082Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:37.725160Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:38.310577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:38.321263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:38.412182Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:40.995165Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678321497458017:2323], D ... n request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`." 
issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:49:45.254968Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715671; 2026-01-07T17:49:45.255141Z node 2 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767808185290 : 281474976715671] from 72075186224038089 at tablet 72075186224038089, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 13758, MsgBus: 18081 2026-01-07T17:49:46.521485Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:46.522484Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:46.588694Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:46.588773Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:46.609036Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:46.609719Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:46.625053Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678345524310002:2081] 1767808186423878 != 1767808186423881 2026-01-07T17:49:46.813023Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:46.833468Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:46.833488Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:46.833495Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:46.833587Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:47.354572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:47.369457Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:47.387204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.470572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.486988Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:47.621107Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.725098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.187497Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362704181060:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.187572Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.187812Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362704181069:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.187841Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.303082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.340316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.377674Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.416202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.464869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.503693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.546023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.594359Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.702072Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362704181942:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.702159Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.702528Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362704181947:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.702566Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678362704181948:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.702676Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:50.706054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:50.720197Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678362704181951:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:50.817846Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678362704182002:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns-IsOlap >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] >> KqpSinkMvcc::DirtyReads+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap >> KqpSinkLocks::OlapUpdateLocksTwoShards [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShard >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] >> KqpTx::EmptyTxOnCommit [GOOD] >> KqpTx::CommitStats >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> KqpSinkTx::OlapInteractive >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22890, MsgBus: 14269 2026-01-07T17:49:32.212112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678288628382259:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.212320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:32.765615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:32.765768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:32.773289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:32.810570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678288628382125:2081] 1767808172186053 != 1767808172186056 2026-01-07T17:49:32.812511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:32.825073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.066301Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:33.080206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:33.080239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:33.080246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:33.080647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:33.221140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:33.579861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:33.589824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:33.634062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.818116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.970395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.036026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.163893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305808253189:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.164000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.164580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305808253199:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.164654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.455636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.488400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.518336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.553238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.589841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.672107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.714312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.766021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.849657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305808254068:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.849731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.850118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305808254072:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.850183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305808254074:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.850213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.854268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:36.867610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678305808254077:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:36.975042Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678305808254128:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:37.213066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678288628382259:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:37.213122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:41.409413Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODcyOGVmOTItYTU4MTVhNjgtZWUxMGIzNzItZTN ... action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:49.956675Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:50.058276Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:50.065002Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:50.082820Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:50.082915Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:50.113388Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:50.193857Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:50.193878Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:50.193886Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:50.193985Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:50.242869Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:50.671034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:50.681715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 
72057594046644480 2026-01-07T17:49:50.687483Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.770251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.956256Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:50.987690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:51.095698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.777092Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678377843403514:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.777204Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.781047Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678377843403524:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.781125Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.855141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.902127Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.946493Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.989784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.026004Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.135625Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.179388Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.253497Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.367917Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678382138371692:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.368015Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.368418Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678382138371697:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.368459Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678382138371698:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.368490Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.373145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:54.390848Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678382138371701:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:54.444336Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678382138371752:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:54.953048Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678360663532702:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:54.953140Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:57.499236Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZjMwZWYzZDEtYzk0OTg3MzQtZDRiZWU4MzktYTM1ZmM3Y2Y=, ActorId: [3:7592678390728306632:2523], ActorState: ExecuteState, LegacyTraceId: 01kecs8rrr57n6w5kjq6fzkg9j, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TwoShard`" issue_code: 2001 severity: 1 } trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 3526, MsgBus: 5511 2026-01-07T17:49:26.466478Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678260582081824:2239];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.466548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.877047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.939996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.940115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.037105Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.040740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.049001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.193679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.193728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.193747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-07T17:49:27.193846Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.468536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.949513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:28.024771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.157964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.320097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.412735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.772102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273466985353:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.772300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.772605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273466985363:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.772671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.044816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.074826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.100506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.126867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.155099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.201541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.244894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.308285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.445055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277761953538:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.445141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.445534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277761953543:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.445570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678277761953544:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.445598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.453002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.469136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678277761953547:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:30.544899Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678277761953598:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.467327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678260582081824:2239];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.467401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.297444Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678290646855839:2533], SessionActorId: [1:7592678286351888490:2533], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7592678286351888490:2533]. 2026-01-07T17:49:33.297691Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZDA2YTk2MjctY2E0ZmY3ZGEtMWQ3NDM2YmEtNWM0NWNhODc=, ActorId: [1:7592678286351888490:2533], ActorState: ExecuteState, Le ... 17:49:49.919962Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:50.032101Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:50.034489Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:50.041669Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:50.041768Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:50.065802Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:50.217535Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:50.217560Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:50.217572Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:50.217649Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:50.259147Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:50.778353Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:50.791785Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:50.807916Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.876256Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.989666Z node 4 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:51.057347Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:51.135723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.714342Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678378060968543:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.714433Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.714854Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678378060968553:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.714906Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.838845Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:53.940549Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.009314Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.082105Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.117308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.156780Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.201607Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.255225Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.351585Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678382355936724:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.351658Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.351952Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678382355936730:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.351952Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678382355936729:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.351995Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.355383Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:54.369218Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678382355936733:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:54.448007Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678382355936784:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:54.903312Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678360881097536:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:54.903382Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:57.145384Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ZmUyZDJhYjEtMTRmYTg1MTMtNTdlMTc2YTEtOTdlMzA5ZTM=, ActorId: [4:7592678390945871696:2533], ActorState: ExecuteState, LegacyTraceId: 01kecs8r9s3xmndc5xs7v4hf1k, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28015, MsgBus: 62210 2026-01-07T17:49:26.496273Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262060968907:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.496855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.909589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.958831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.958938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.055550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.115940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.120790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678262060968803:2081] 1767808166431712 != 1767808166431715 2026-01-07T17:49:27.187446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.199113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.199132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.199149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.199237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.509095Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.847791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.853374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.788169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274945871583:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.788186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274945871575:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.788297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274945871590:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.803395Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678274945871589:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.935584Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678274945871642:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.197558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.312681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.346857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.641822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678262060968907:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.022432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21725, MsgBus: 19546 2026-01-07T17:49:34.180373Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678294694302494:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.180613Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.250807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.350811Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678294694302272:2081] 1767808174171071 != 1767808174171074 2026-01-07T17:49:34.358821Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.365369Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.365438Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.375310Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.469466Z node 2 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:34.469487Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:34.469496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:34.469564Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:34.499085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:34.946282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:34.953187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.181934Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:37.488905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307579205033:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.488992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307579205053:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.489037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.492885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:37.493017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307579205058:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.493138Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_ ... 6;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.363138Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.363198Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.363218Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.369962Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.370017Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.370030Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.371408Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.371455Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.371471Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.377841Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.377900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.377913Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.380590Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.380652Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.380669Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.387059Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.387118Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:45.387140Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; Trying to start YDB, gRPC: 18747, MsgBus: 22666 2026-01-07T17:49:50.083321Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678365433611738:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:50.083363Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:50.109538Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:50.208841Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:50.217222Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:50.223161Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:50.229671Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:50.261922Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:50.405588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:50.405610Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:50.405618Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:50.405700Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-07T17:49:50.428770Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:50.940380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:50.945834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:51.102071Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:53.940808Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678378318514455:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.940918Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.941420Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678378318514467:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.941472Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678378318514468:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.941765Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:53.946020Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:53.963422Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678378318514471:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:54.043771Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678382613481818:2530] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:54.107515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.166186Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.085153Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678365433611738:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:55.085312Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:55.347875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22157, MsgBus: 23315 2026-01-07T17:49:26.383185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678261315439014:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.383238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.455683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:26.831324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.840418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.840525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.904860Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.986186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.991142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678261315438982:2081] 1767808166370814 != 1767808166370817 2026-01-07T17:49:27.124022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.173462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.173481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173547Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.425187Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.913244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.978767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.120178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.264454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:28.332782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:29.668720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274200342745:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.668854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.669184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274200342755:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.669250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.040581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.075070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.099155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.132127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.165368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.198220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.241941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.324755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.433301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678278495310924:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.433428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.435495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678278495310930:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.435565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678278495310929:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.435699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.439247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.458616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678278495310933:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:30.530797Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678278495310986:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.389382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678261315439014:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.389473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.213197Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678291380213218:2534], Sessi ... thod=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.705368Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.705383Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714487Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714543Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714559Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714615Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714668Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.714684Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.723719Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.723730Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.723784Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.723804Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.724035Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.724051Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733061Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733124Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733143Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733199Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733245Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.733261Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742425Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742425Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742443Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.742443Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.751575Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.751638Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.751656Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.755434Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.755493Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.755509Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.761467Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.761528Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.761544Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.763508Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.763548Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.763563Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.769349Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.769406Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.769422Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.772117Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.772158Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:56.772173Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-07T17:49:58.015624Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YmJiY2E4NGYtOTgxZDk4MzktYTNkM2IwMTEtMzVkYzI2M2Y=, ActorId: [4:7592678395493865099:2690], ActorState: ExecuteState, LegacyTraceId: 01kecs8s8n6kh456b7f689qqed, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/DataShard`" issue_code: 2001 severity: 1 } trace_id# 2026-01-07T17:49:58.022543Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976710670; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 11147, MsgBus: 26463 2026-01-07T17:49:26.504455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678259502915215:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.504628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.592582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:26.983230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.983330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.984549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.036144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.037107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:27.044488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678259502914988:2081] 1767808166478029 != 1767808166478032 2026-01-07T17:49:27.171772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.171793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.171804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.171910Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.219676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.505028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.957708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:49:27.967968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.760714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272387817766:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.760714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272387817774:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.760830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.761092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272387817781:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.761142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.764310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.775058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272387817780:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.887891Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272387817834:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.209607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.271632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.899771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678259502915215:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.986722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13913, MsgBus: 20958 2026-01-07T17:49:34.220780Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678294692047001:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.225175Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.344982Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.363765Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678294692046761:2081] 1767808174162198 != 1767808174162201 2026-01-07T17:49:34.378887Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.381767Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.381825Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.391039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.554531Z node 2 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:34.554553Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:34.554561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:34.554630Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:34.634150Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:35.045766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.069418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.223924Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:37.691928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307576949507:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.692117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.692670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307576949543:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.703871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:37.705058Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678307576949547:2328], Data ... is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7592678354008156374:2962]. 2026-01-07T17:49:48.608152Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZjVlYjJiNjctYTJlNTgwMmEtOGFlNDQ5YTItNmZiMDQ0ZDg=, ActorId: [3:7592678354008156374:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs8g2nas1t0znf43mv141w, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678354008156428:2962] from: [3:7592678354008156393:2962] trace_id# 2026-01-07T17:49:48.608238Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678354008156428:2962] TxId: 281474976715666. Ctx: { TraceId: 01kecs8g2nas1t0znf43mv141w, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZjVlYjJiNjctYTJlNTgwMmEtOGFlNDQ5YTItNmZiMDQ0ZDg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:49:48.608429Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZjVlYjJiNjctYTJlNTgwMmEtOGFlNDQ5YTItNmZiMDQ0ZDg=, ActorId: [3:7592678354008156374:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs8g2nas1t0znf43mv141w, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 65079, MsgBus: 13991 2026-01-07T17:49:50.094283Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678364798294488:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:50.094332Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:50.152670Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:50.247748Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:50.247827Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:50.273021Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678364798294259:2081] 1767808190047042 != 1767808190047045 2026-01-07T17:49:50.280828Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:50.288736Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:50.447306Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:50.479882Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:50.479905Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:50.479918Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:50.479997Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:51.065768Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:51.081253Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:51.097427Z node 4 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:54.106386Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678381978164333:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.106493Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.107716Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678381978164345:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.107772Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678381978164346:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.108053Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:54.112097Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:54.128052Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678381978164349:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:54.207786Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678381978164402:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:54.304177Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.373425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.354224Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678364798294488:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:55.357765Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:55.772954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.902884Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2026-01-07T17:49:57.903142Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037889 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:49:57.906947Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037889 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-07T17:49:57.907196Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [4:7592678394863074293:2964], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7592678394863074268:2964]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7592678394863074293:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:49:57.907297Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592678394863074287:2964], SessionActorId: [4:7592678394863074268:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7592678394863074268:2964]. 2026-01-07T17:49:57.907515Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ODc3ZjZmZjAtNjJhN2RlZjgtNzg3ZWQ0YjUtZWE5ZGQ4YmY=, ActorId: [4:7592678394863074268:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs8s66041gackadvma7pf9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678394863074327:2964] from: [4:7592678394863074287:2964] trace_id# 2026-01-07T17:49:57.907600Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678394863074327:2964] TxId: 281474976710666. Ctx: { TraceId: 01kecs8s66041gackadvma7pf9, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ODc3ZjZmZjAtNjJhN2RlZjgtNzg3ZWQ0YjUtZWE5ZGQ4YmY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:49:57.907826Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ODc3ZjZmZjAtNjJhN2RlZjgtNzg3ZWQ0YjUtZWE5ZGQ4YmY=, ActorId: [4:7592678394863074268:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs8s66041gackadvma7pf9, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] Test command err: 2026-01-07T17:48:18.521185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677966878583444:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:18.521263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:18.565602Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:18.566569Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677968584560456:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:48:18.566765Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:48:18.573933Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:48:18.723842Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:18.740436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:48:18.765559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:18.765656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.766772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:48:18.766860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:48:18.802756Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:48:18.803559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.804141Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:48:18.825456Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:18.888953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:48:18.923504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003671/r3tmp/yandexyI5wrt.tmp 2026-01-07T17:48:18.923619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003671/r3tmp/yandexyI5wrt.tmp 2026-01-07T17:48:18.923853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003671/r3tmp/yandexyI5wrt.tmp 2026-01-07T17:48:18.923996Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:48:18.954221Z INFO: TTestServer started on Port 30415 GrpcPort 23875 2026-01-07T17:48:18.993584Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:23875 2026-01-07T17:48:19.101658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:48:19.158885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:19.533964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:19.570765Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:48:21.245868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677979763486706:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:21.245955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677979763486714:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:21.246036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:21.246849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677979763486722:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:21.246938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:48:21.250464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:48:21.265532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677979763486721:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:48:21.459063Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677979763486810:3051] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:48:21.487330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:48:21.494912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677979763486820:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:21.495367Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Zjk4YmFiMjItNDUxNWFkZTMtYmQ1YmUwZjEtNzRkYjBjNTE=, ActorId: [1:7592677979763486703:2329], ActorState: ExecuteState, LegacyTraceId: 01kecs5tvwcm86msdrbcgv2hct, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:48:21.497259Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677981469462651:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:48:21.497489Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:48:21.497627Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODljMzdhNjktZmRjZjQ0MjYtNDJjZDUwMS03YmY5YzljNQ==, ActorId: [2:7592677981469462627:2304], ActorState: ExecuteState, LegacyTraceId: 01kecs5ty72mp71c3xp4x9qxxd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:48:21.497986Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' be ... 
O `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3059208687299993691, "Root", "00595F536F757263655F37", 1767808195065, 1767808195065, 0, 13); 2026-01-07T17:49:55.326572Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:49:55.326603Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:49:55.326616Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:49:55.326642Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-07T17:49:55.326793Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7592678385955166851:4258], Recipient [11:7592678355890394710:3619]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [11:7592678385955166850:4258] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-07T17:49:55.326891Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394710:3619]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "Y_Source_7" 2026-01-07T17:49:55.326969Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [11:7592678355890394710:3619], Recipient [11:7592678385955166850:4258]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-07T17:49:55.327003Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=Y_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-07T17:49:55.327073Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394710:3619]: NActors::TEvents::TEvPoison 2026-01-07T17:49:55.327589Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 
277020685, Sender [11:7592678304350784827:2073], Recipient [11:7592678385955166850:4258]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-07T17:49:55.327621Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) StartKqpSession 2026-01-07T17:49:55.333128Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [11:7592678304350785030:2254], Recipient [11:7592678385955166850:4258]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=11&id=Nzk1M2I3YjEtMzM2NGIzMDMtYzE4MWYzNy02NDdlMzc4YQ==" NodeId: 11 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-07T17:49:55.333184Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Select from the table 2026-01-07T17:49:55.681463Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [11:7592678304350785030:2254], Recipient [11:7592678385955166850:4258]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=Nzk1M2I3YjEtMzM2NGIzMDMtYzE4MWYzNy02NDdlMzc4YQ==" PreparedQuery: "46a1d01b-4d2b1d65-7c7097cc-5384bca8" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kecs8pyz32rtnwy0haegaxad" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1767808195065 } items { uint64_value: 1767808195065 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS 2026-01-07T17:49:55.681716Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-07T17:49:55.681760Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:151: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2026-01-07T17:49:55.681975Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7592678385955166901:4258], Recipient [11:7592678355890394709:3618]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [11:7592678385955166850:4258] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-07T17:49:55.682103Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271187968, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394709:3618]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000Y_Source_7" } PipeClient { RawX1: 7592678385955166901 RawX2: 47244644514 } } 
2026-01-07T17:49:55.682283Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-07T17:49:55.682408Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394709:3618]: NActors::TEvents::TEvPoison 2026-01-07T17:49:55.682464Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7592678385955166902:4258], Recipient [11:7592678355890394711:3620]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1002 Status: OK ServerId: [11:7592678385955166850:4258] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-07T17:49:55.682521Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394711:3620]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 2 2026-01-07T17:49:55.682608Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [11:7592678355890394711:3620], Recipient [11:7592678385955166850:4258]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-07T17:49:55.682651Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Update the table 2026-01-07T17:49:55.682929Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7592678385955166850:4258], Recipient [11:7592678355890394711:3620]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2 2026-01-07T17:49:55.867472Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [11:7592678304350785030:2254], Recipient [11:7592678385955166850:4258]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=Nzk1M2I3YjEtMzM2NGIzMDMtYzE4MWYzNy02NDdlMzc4YQ==" PreparedQuery: "5a0510bb-f6e44e69-377f514e-84e835d7" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 86 2026-01-07T17:49:55.867524Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-07T17:49:55.867559Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=2, SeqNo=157 2026-01-07T17:49:55.867584Z node 11 :PQ_PARTITION_CHOOSER DEBUG: 
partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7592678385955166850:4258] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3059208687299993691 AND Topic = "Root" AND ProducerId = "00595F536F757263655F37" 2026-01-07T17:49:57.372320Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7592678394545101672:2717] TxId: 281474976710708. Ctx: { TraceId: 01kecs8rgg5g450mcg7nm7vv39, Database: /Root, SessionId: ydb://session/3?node_id=11&id=NTM2YjRlYzgtZGMzNzdmOS1kYjIwM2QxNS0xZWRkMjM2Nw==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-07T17:49:57.372477Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7592678394545101676:2717], TxId: 281474976710708, task: 2. Ctx: { TraceId : 01kecs8rgg5g450mcg7nm7vv39. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=NTM2YjRlYzgtZGMzNzdmOS1kYjIwM2QxNS0xZWRkMjM2Nw==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7592678394545101672:2717], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::TestReadOnly-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 2662, MsgBus: 9587 2026-01-07T17:49:30.005496Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678279798736019:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:30.005621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:30.241056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:30.248046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:30.248219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:30.271068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:30.390638Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:30.393497Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678275503768696:2081] 1767808170004127 != 1767808170004130 2026-01-07T17:49:30.465743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:30.613643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:30.613670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:30.613676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:30.613762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:31.015130Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:31.101790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:31.109648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:31.179381Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.365085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.559555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.695679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.686295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678292683639758:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.686447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.686732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678292683639768:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.686781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.002821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.035361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.075411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.105189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.193803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.241470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.290726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.356852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.463857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678296978607946:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.463947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.464520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678296978607951:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.464561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678296978607952:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.464666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.469250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:34.489219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678296978607955:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:34.586832Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678296978608006:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:35.009287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678279798736019:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:35.009355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15614, MsgBus: 22406 2026-01-07T17:49:38.326386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeS ... :49:48.870520Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:48.870600Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:48.875623Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:48.880713Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678355876050521:2081] 1767808188679755 != 1767808188679758 2026-01-07T17:49:48.902150Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:49.040709Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:49.040727Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:49.040734Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:49.040808Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:49.061783Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:49.542468Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:49.557946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.625323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.729934Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:49.771334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.840611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.333056Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678373055921564:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.333152Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.333436Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678373055921574:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.333473Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.402842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.445392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.482835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.532364Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.581552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.632334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.716286Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.770665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.870732Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678373055922456:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.870845Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.871249Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678373055922461:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.871303Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678373055922462:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.871422Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:52.875634Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:52.892089Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678373055922465:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:52.991360Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678373055922516:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:53.684190Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678355876050590:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:53.684282Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:54.830346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.880257Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:54.935163Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 2877, MsgBus: 15110 2026-01-07T17:49:26.482124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678260043200612:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.482172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.808244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.898371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.898490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.953818Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678260043200506:2081] 1767808166479513 != 1767808166479516 
2026-01-07T17:49:26.985780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.996143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.144961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.173137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.173153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173225Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.486362Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.928440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.939789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.792283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272928103283:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272928103288:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272928103298:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.792691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.795968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.806528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272928103297:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.944296Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272928103350:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.209422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.394024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.394139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.394296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.394503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.394593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.394698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.394798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.394805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.394941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.394954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.395059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.395086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.395148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.395194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.395262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.395310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.395377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.395418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.395470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.395518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678277223070808:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.395577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592678277223070813:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.395635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_i ... 
te_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228140Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228163Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228237Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YTFmYjEzNjMtMzZkOThhZTktMzY4NGUxYTgtZGYzNzEzMjQ=, ActorId: [2:7592678360265730576:2965], ActorState: ExecuteState, LegacyTraceId: 01kecs8hpy4cdpvm7929j21gr1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7592678364560698485:2965] from: [2:7592678364560698484:2965] trace_id# 2026-01-07T17:49:50.228253Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228265Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228286Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228365Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7592678364560698485:2965] TxId: 281474976710668. Ctx: { TraceId: 01kecs8hpy4cdpvm7929j21gr1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YTFmYjEzNjMtMzZkOThhZTktMzY4NGUxYTgtZGYzNzEzMjQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:49:50.228450Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228469Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228481Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.228734Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YTFmYjEzNjMtMzZkOThhZTktMzY4NGUxYTgtZGYzNzEzMjQ=, ActorId: [2:7592678360265730576:2965], ActorState: ExecuteState, LegacyTraceId: 01kecs8hpy4cdpvm7929j21gr1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:49:50.229168Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229220Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229234Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229245Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229262Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229316Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229328Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 2026-01-07T17:49:50.229339Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7592678364560698484:2965], SessionActorId: [2:7592678360265730576:2965], StateRollback: unknown message 278003713 Trying to start YDB, gRPC: 7154, MsgBus: 5490 2026-01-07T17:49:51.831427Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678367017241096:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:51.832880Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:51.886316Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:52.014264Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:52.014355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:52.022424Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:52.054195Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:52.136152Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:52.136174Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:52.136198Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:52.136283Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:52.167411Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:52.698752Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:52.709113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:52.847805Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:55.988117Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678384197111117:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.988190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678384197111107:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.988332Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.991210Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678384197111122:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.991299Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.997178Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:56.012500Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678384197111121:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:56.084398Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678388492078470:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:56.150467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:56.200295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.093518Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678367017241096:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:57.095893Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:57.456576Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpRollback::DoubleUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> KqpSinkMvcc::WriteSkewInsert-IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] Test command err: Trying to start YDB, gRPC: 3052, MsgBus: 17113 2026-01-07T17:49:26.389818Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678261631399056:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.390257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.669439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.710659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.710756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-07T17:49:26.784727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.837839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678261631398953:2081] 1767808166371380 != 1767808166371383 2026-01-07T17:49:26.840137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.885005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.172068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.172088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.172102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.172160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.396719Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.895611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.921813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.553617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274516301727:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.553618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274516301737:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.553709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.553945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274516301743:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.553985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.558116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.570268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678274516301742:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.709371Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678274516301795:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.206077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.253257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.548012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678261631399056:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.040273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13717, MsgBus: 24705 2026-01-07T17:49:34.747746Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678295510396924:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.748500Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.801132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.894261Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.894340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.903824Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678295510396861:2081] 1767808174707132 != 1767808174707135 2026-01-07T17:49:34.910539Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.923345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.968045Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:35.136139Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:35.136161Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:35.136167Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:35.136235Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:35.534951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:35.662123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.677757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.746977Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:38.469266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312690266907:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.469371Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.471227Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312690266942:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.471328Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678312690266944:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:38.471412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.358117Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.361669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.375096Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678348189408965:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:46.430491Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678348189409016:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:46.483622Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.524151Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:47.594932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.251209Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678331009538911:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:48.322552Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:51.480967Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678369664253544:2962], SessionActorId: [3:7592678356779351573:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[3:7592678356779351573:2962]. 2026-01-07T17:49:51.481180Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZDUwM2E3YjUtY2Q2M2I1ZmEtMmEwMWFkZDQtNDYxNDc5NDM=, ActorId: [3:7592678356779351573:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs8jqaaapcq31wvyzzq4h1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678369664253545:2962] from: [3:7592678369664253544:2962] trace_id# 2026-01-07T17:49:51.481249Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678369664253545:2962] TxId: 281474976710666. Ctx: { TraceId: 01kecs8jqaaapcq31wvyzzq4h1, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDUwM2E3YjUtY2Q2M2I1ZmEtMmEwMWFkZDQtNDYxNDc5NDM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-07T17:49:51.481464Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDUwM2E3YjUtY2Q2M2I1ZmEtMmEwMWFkZDQtNDYxNDc5NDM=, ActorId: [3:7592678356779351573:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs8jqaaapcq31wvyzzq4h1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 6357, MsgBus: 21486 2026-01-07T17:49:52.898148Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678373770660417:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:52.898361Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:52.993364Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:53.013873Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:53.017143Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678373770660285:2081] 1767808192891452 != 1767808192891455 2026-01-07T17:49:53.033920Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:53.034014Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:53.067076Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:53.249770Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:53.249793Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:53.249801Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:53.249898Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:53.263883Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:53.733800Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:53.905379Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:56.866980Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678390950530348:2323], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.867090Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.872042Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678390950530360:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.872123Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678390950530361:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.872297Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.875423Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:56.888489Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678390950530364:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:56.961111Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678390950530417:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:57.035484Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.109726Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:58.112811Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678373770660417:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:58.132026Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:58.371458Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> KqpTx::CommitPrepared [GOOD] >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::LocksAbortOnCommit >> KqpTx::CommitStats [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 6735, MsgBus: 
15597 2026-01-07T17:49:33.631952Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678289557878856:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:33.632021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:33.890645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:33.890760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.924692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.940102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:33.971506Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678289557878707:2081] 1767808173596646 != 1767808173596649 2026-01-07T17:49:33.977507Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.151312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:34.151332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:34.151339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:34.151410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:34.178426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:34.638315Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:34.815058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:34.914143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.110053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:49:35.308648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.388786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.194486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678306737749762:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.194594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.194942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678306737749772:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.195007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.503923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.539313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.580807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.616980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.660815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.713357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.771548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.822627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.917443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678306737750645:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.917519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.917894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678306737750650:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.917932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678306737750651:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.918050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.921480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:37.934536Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678306737750654:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:38.033763Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678311032718001:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:38.630890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678289557878856:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:38.630950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:48.821176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:49:48.821208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:50.506011Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7592678362572326190:2656], TxId: 28147 ... ATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11563, MsgBus: 27006 2026-01-07T17:49:58.487671Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678399319434649:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:58.487726Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:58.513010Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:58.602878Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:58.609107Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678399319434622:2081] 1767808198486502 != 1767808198486505 2026-01-07T17:49:58.633938Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:58.634025Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:58.636327Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:58.683188Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:58.683218Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:58.683227Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:58.683311Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:58.758518Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:59.212429Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:59.226050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:59.242413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:59.297850Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:59.437719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:59.550913Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:59.578695Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.013087Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416499305680:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.013206Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.013544Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416499305690:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.013612Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.086953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.114141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.144237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.170385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.197131Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.225636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.254245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.293628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.373898Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416499306557:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.373992Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.374112Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416499306562:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.374185Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416499306564:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.374253Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.378101Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:02.388413Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678416499306566:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:02.464117Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678416499306617:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:03.487943Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678399319434649:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:03.488006Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 23554, MsgBus: 7423 2026-01-07T17:49:44.964548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678339512974884:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:44.964595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:45.229006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:45.257111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:45.257243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:45.326796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:45.332242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:45.333236Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678339512974678:2081] 1767808184953378 != 1767808184953381 2026-01-07T17:49:45.421601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:45.421620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:45.421647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:45.421722Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:45.514674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:45.974693Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:46.020258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:46.029815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:46.096549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.281047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.450602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.527355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.367226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678356692845741:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:48.367359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:48.367738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678356692845751:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:48.367817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:48.674692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.726588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.775235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.817769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.858407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.901627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.944830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.008441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.111270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678360987813927:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.111370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.111595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678360987813932:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.111702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678360987813934:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.111745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.114621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:49.123717Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678360987813936:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:49.209445Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678360987813987:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:49.962236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678339512974884:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:49.962304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25789, MsgBus: 15660 2026-01-07T17:49:52.287634Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActo ... accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:57.297035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678373418388471:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:57.297112Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16399, MsgBus: 7632 2026-01-07T17:49:59.446267Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678401843301385:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:59.446419Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:59.465313Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:59.552043Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:59.572578Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:59.572661Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:59.574650Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:59.621747Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:59.621771Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:59.621780Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:59.621879Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:59.672353Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:00.038091Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:00.063691Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:00.158916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:00.317160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:00.400484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:00.542750Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:02.733978Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678414728204994:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.734080Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.734323Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678414728205004:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.734366Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.793294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.820281Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.857812Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.889038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.912442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.946195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.976260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:03.020856Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:03.090608Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678419023173172:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:03.090693Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:03.090736Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678419023173177:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:03.090919Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678419023173179:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:03.090980Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:03.094408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:03.104095Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678419023173180:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:03.194399Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678419023173232:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:04.443180Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678401843301385:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:04.443254Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:46:27.742517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:46:27.742594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.742620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:46:27.742645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:46:27.742675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:46:27.742699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:46:27.742753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:46:27.742818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:46:27.743519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:46:27.743764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:46:27.819526Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:46:27.819583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:27.833245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:46:27.833556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:46:27.833750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:46:27.839866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:46:27.840218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:46:27.840869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:46:27.841113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:46:27.845238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.845468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:46:27.846636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:46:27.846688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:46:27.846798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:46:27.846840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:46:27.846886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:46:27.847079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:46:27.965325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966160Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:46:27.966759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
10:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T17:50:05.662909Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-07T17:50:05.663529Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:50:05.663599Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:50:05.663826Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:50:05.663896Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:05.663972Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:50:05.664030Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:05.664107Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:50:05.664193Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:05.664269Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:50:05.664329Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:50:05.664615Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:50:05.664701Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:50:05.664769Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-07T17:50:05.664820Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-07T17:50:05.666215Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:05.666322Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:05.666380Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:50:05.666456Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 5 2026-01-07T17:50:05.666525Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:50:05.667742Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:05.667844Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:05.667881Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:50:05.667920Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-07T17:50:05.667961Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:50:05.668052Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:50:05.672852Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:50:05.672988Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:50:05.673314Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:50:05.673400Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:50:05.674072Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:50:05.674229Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:50:05.674304Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:887:2839] TestWaitNotification: OK eventTxId 102 2026-01-07T17:50:05.675027Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:05.675374Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 397us result status StatusSuccess 2026-01-07T17:50:05.676066Z node 17 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:05.676970Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:50:05.677248Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 339us result status StatusSuccess 2026-01-07T17:50:05.677889Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 
TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:05.806567Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__find_subdomain_path_id.cpp:20: FindTabletSubDomainPathId for tablet 72075186233409546 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> EvWrite::WriteInTransaction >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 9056, MsgBus: 29184 2026-01-07T17:49:26.386314Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678259874518540:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.386531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.693117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.711368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.711490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.761303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.933031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678259874518442:2081] 1767808166373102 != 1767808166373105 2026-01-07T17:49:26.948558Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:26.950033Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.173457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.173480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.389054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.897836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.918058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.641602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272759421215:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.641612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272759421227:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.641687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.641966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272759421230:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.642022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.645097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.653385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272759421229:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.767729Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272759421282:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.126254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.317250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.317429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.317634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.317777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.317869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.317976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.318057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.318131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.318229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.318310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.318409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.318483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.318558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277054388750:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:30.321383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.321424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.321591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.321683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.321792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.321881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.321951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.322040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678277054388756:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.322123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
2026-01-07T17:49:56.029649Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.029657Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.029671Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.029693Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.037823Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.037900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.037916Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.043593Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.043652Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.043670Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.045685Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.045738Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.045751Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.053805Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.053873Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.053891Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.054784Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.054818Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.054853Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.071581Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.071645Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:49:56.071663Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; Trying to start YDB, gRPC: 64241, MsgBus: 5737 2026-01-07T17:49:59.015592Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:59.015662Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:59.038067Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:59.038156Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:59.073743Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:59.087529Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:59.099573Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678399149154872:2081] 1767808198829033 != 1767808198829036 2026-01-07T17:49:59.180672Z node 3 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:59.180693Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:59.180701Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:59.180782Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:59.279113Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:59.636578Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:59.909066Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:02.597790Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416329024938:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.597791Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416329024951:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.597858Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.598085Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678416329024962:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.598164Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:02.601234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:02.610805Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678416329024961:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:02.701817Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678416329025014:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:02.763154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:02.826450Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:03.857659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock |94.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 15883, MsgBus: 11011 2026-01-07T17:49:26.397375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262752769784:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.397412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.707974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.708079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.787407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.790262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.904767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.986522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.175066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.175085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.175113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.175206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.413071Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.874286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.474805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275637672429:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.474813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275637672407:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.474920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.475157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275637672447:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.475220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.477647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.485218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275637672446:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.601320Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275637672499:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.119673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.315467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.315723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.315937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.316029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.316101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.316185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.316263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.316370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.316455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.316540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.316622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.316697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.316770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678279932639933:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:30.319179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.319247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.319390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.319475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.319547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.319640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.319715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.319783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.319853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.319919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279932639939:2340];tablet_id=72075186224037893;process=TTxIni ... 3165Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678387150848992:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.633365Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.633854Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678387150849020:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.633863Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678387150849019:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.633930Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.636180Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678387150849024:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.636277Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.638059Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:55.654148Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678387150849023:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:55.737411Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678387150849076:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:55.795680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.886782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:56.757822Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678369970979137:2237];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:56.798372Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:57.183303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 63072, MsgBus: 26464 2026-01-07T17:50:00.619285Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678408589949868:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:00.619474Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:00.634169Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:00.719546Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678408589949714:2081] 1767808200604782 != 1767808200604785 2026-01-07T17:50:00.719922Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:00.732300Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:00.732391Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:00.738439Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:00.817006Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:00.817602Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:00.817617Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:00.817629Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:00.817722Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:01.363401Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:01.370321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:01.625791Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:04.666751Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678425769819788:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.666768Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678425769819770:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.666866Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.667155Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678425769819795:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.667221Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.672370Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:04.682876Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678425769819794:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:04.772475Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678425769819847:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:04.838809Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:04.882183Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:05.692253Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678408589949868:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:05.693478Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:05.964561Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 8091, MsgBus: 2287 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:29.185681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:29.303854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:29.325524Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:29.326218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:29.326292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:29.611856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:29.612015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:29.685417Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808166370158 != 1767808166370162 2026-01-07T17:49:29.700460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:29.746508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:29.873526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:30.204568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:30.204634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:30.204686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:30.205338Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:30.217549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:30.612393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:30.725591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.043915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.464019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.790220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.678004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.678341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.679305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.679418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.725533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.931665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.255739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.557693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.801169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.089484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.392837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.729787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.171913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.172078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.172474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.172665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.172910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.178881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:35.391801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2802:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:35.463572Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2862:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:38.369179Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzQ0YmJiZDEtOTIyYjJkNjgtNzY2Y2M0NGItYjhhZjk4MTg=, ActorId: [1:3146:4457], ActorState: ExecuteState, LegacyTraceId: 01kecs8564fhefjc77smbe7xxr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken s ... 1.505205Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000934":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"89,"}}]}}; 2026-01-07T17:50:01.505322Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-07T17:50:01.505391Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000949":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"8,"}}]}}; 2026-01-07T17:50:01.505528Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-07T17:50:01.505674Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000940":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"65,"}}]}}; 2026-01-07T17:50:01.505713Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000941":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"56,"}}]}}; 2026-01-07T17:50:01.505798Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-07T17:50:01.505916Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000940":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"65,"}}]}}; 2026-01-07T17:50:01.506044Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-07T17:50:01.506091Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000946":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"16,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"98,"}}]}}; 2026-01-07T17:50:01.506268Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000946":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"98,"}}]}}; 2026-01-07T17:50:01.506322Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000951":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"40,"}}]}}; 2026-01-07T17:50:01.506430Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000946":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"98,"}}]}}; 2026-01-07T17:50:01.506483Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-07T17:50:01.506553Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; Trying to start YDB, gRPC: 22807, MsgBus: 13295 2026-01-07T17:50:02.648412Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678416045062621:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:02.648464Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:02.660184Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:02.722679Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:02.770073Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:02.770157Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:02.772589Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:50:02.809781Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:02.809798Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:02.809803Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:02.809856Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:02.817501Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:03.281854Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:03.286701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:03.675309Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:05.924641Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678428929965366:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:05.924731Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678428929965358:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:05.924881Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:05.925515Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678428929965374:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:05.925647Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:05.929318Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:05.940468Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678428929965372:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:06.007105Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678433224932721:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:06.072563Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:06.121043Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:07.175864Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:07.966423Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678416045062621:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:07.991515Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> EvWrite::WriteWithLock [GOOD] >> YdbTableSplit::RenameTablesAndSplit >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::SplitByLoadWithUpdates >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> YdbTableSplit::MergeByNoLoadAfterSplit |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2026-01-07T17:50:09.614801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:50:09.648350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:50:09.649802Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:50:09.681730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:50:09.682107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:50:09.682403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:50:09.682564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:50:09.682669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:50:09.682773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:50:09.682904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:50:09.683034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:50:09.683147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:50:09.683250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.683383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:50:09.683516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:50:09.683684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:50:09.727071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:50:09.727437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:50:09.727506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:50:09.727719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.729849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:50:09.729994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:50:09.730056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:50:09.730179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:50:09.730382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:50:09.730415Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:50:09.730606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:50:09.730719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:50:09.730750Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:50:09.730846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:50:09.730964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:50:09.730994Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:50:09.731064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:50:09.731104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:50:09.731146Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:50:09.731241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:50:09.731298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:50:09.731640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:50:09.731670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:50:09.731791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:50:09.731834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:50:09.731952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:50:09.731984Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:50:09.732028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:50:09.732064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:50:09.732093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:50:09.732273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:50:09.732334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-07T17:50:12.763880Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=54;finished=1; 2026-01-07T17:50:12.763909Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:50:12.763940Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:50:12.764157Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:50:12.764292Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:54;schema=key: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.764328Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:50:12.764424Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=54; 2026-01-07T17:50:12.764468Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=54;batch_columns=key,field; 2026-01-07T17:50:12.764590Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2026-01-07T17:50:12.764701Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.764814Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.765005Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.765154Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:50:12.765253Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.765351Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.765623Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:207:2219] finished for tablet 9437184 2026-01-07T17:50:12.766176Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:206:2218];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.026},{"events":["l_task_result"],"t":0.16},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.162}],"full":{"a":4092998,"name":"_full_task","f":4092998,"d_finished":0,"c":0,"l":4255384,"d":162386},"events":[{"name":"bootstrap","f":4093276,"d_finished":1278,"c":1,"l":4094554,"d":1278},{"a":4254834,"name":"ack","f":4119336,"d_finished":58531,"c":86,"l":4254757,"d":59081},{"a":4254823,"name":"processing","f":4094723,"d_finished":126215,"c":173,"l":4254761,"d":126776},{"name":"ProduceResults","f":4094064,"d_finished":100681,"c":261,"l":4255073,"d":100681},{"a":4255079,"name":"Finish","f":4255079,"d_finished":0,"c":0,"l":4255384,"d":305},{"name":"task_result","f":4094742,"d_finished":65565,"c":87,"l":4253654,"d":65565}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.766256Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:50:12.766735Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:206:2218];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.026},{"events":["l_task_result"],"t":0.16},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.162}],"full":{"a":4092998,"name":"_full_task","f":4092998,"d_finished":0,"c":0,"l":4255997,"d":162999},"events":[{"name":"bootstrap","f":4093276,"d_finished":1278,"c":1,"l":4094554,"d":1278},{"a":4254834,"name":"ack","f":4119336,"d_finished":58531,"c":86,"l":4254757,"d":59694},{"a":4254823,"name":"processing","f":4094723,"d_finished":126215,"c":173,"l":4254761,"d":127389},{"name":"ProduceResults","f":4094064,"d_finished":100681,"c":261,"l":4255073,"d":100681},{"a":4255079,"name":"Finish","f":4255079,"d_finished":0,"c":0,"l":4255997,"d":918},{"name":"task_result","f":4094742,"d_finished":65565,"c":87,"l":4253654,"d":65565}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:12.766841Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:50:12.601672Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2026-01-07T17:50:12.766904Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:50:12.767058Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 8657, MsgBus: 15000 2026-01-07T17:49:32.478004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678284852490953:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.478312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:32.833018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:32.835650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:32.835770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:32.878234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:32.987482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:33.070131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:33.109609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:33.109632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:33.109641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:33.109752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:33.481122Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:33.669094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:33.685846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:36.022387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678302032360762:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.022480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.022688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678302032360775:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.022811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678302032360785:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.022877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.026716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:36.037481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678302032360787:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:36.134680Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678302032360838:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:36.425425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:36.609815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:36.610000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:36.610174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:36.610264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:36.610352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:36.610476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:36.610563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:36.610657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:36.610739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:36.610809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:36.610894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:36.611006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:36.611081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678302032361017:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:36.621961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:36.622012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:36.622151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:36.622239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:36.622309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:36.622378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:36.622441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:36.622511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:36.622577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7592678302032361005:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Restor ... 2026-01-07T17:50:01.028913Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.028968Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.031203Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.031271Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.031326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.039801Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.039871Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.039887Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.041136Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.041201Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:01.041218Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:03.089014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:03.089050Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:03.296533Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YjMwMzU0ZTgtN2IyNzJkN2YtNWM2ZmUzNGQtNWViMTU0OWE=, ActorId: [2:7592678413188459823:3547], ActorState: ReadyState, LegacyTraceId: 01kecs8ygk93wmexaeyag962cy, Create QueryResponse for error on 
request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kecs8wkj5cze8qzvpkc1f7ex" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 23529, MsgBus: 2362 2026-01-07T17:50:05.315231Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678429179498634:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:05.315336Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:05.333730Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:05.418131Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:05.458806Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:05.458911Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:05.468094Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:05.548347Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:05.569584Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:05.569608Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:05.569616Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:05.569692Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:06.063943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:06.327089Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:09.615866Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678446359368576:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:09.615928Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678446359368594:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:09.616006Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:09.619993Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678446359368601:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:09.620084Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:09.622857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:09.638896Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678446359368600:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:09.698468Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678446359368653:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:09.765031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:09.921351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:10.671829Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678429179498634:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:10.696491Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:11.050125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.080344Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678463539246007:2964], SessionActorId: [3:7592678459244278432:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 . sessionActorId=[3:7592678459244278432:2964]. 2026-01-07T17:50:13.080517Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=M2QyY2U5NTItZTVhYWZiNmEtMTljMzAxYzctNzIyMzc5OGY=, ActorId: [3:7592678459244278432:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs982h6j0fr7sr0m4ycb3m, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678463539246008:2964] from: [3:7592678463539246007:2964] trace_id# 2026-01-07T17:50:13.080598Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678463539246008:2964] TxId: 281474976715667. Ctx: { TraceId: 01kecs982h6j0fr7sr0m4ycb3m, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2QyY2U5NTItZTVhYWZiNmEtMTljMzAxYzctNzIyMzc5OGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } trace_id# 2026-01-07T17:50:13.080838Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2QyY2U5NTItZTVhYWZiNmEtMTljMzAxYzctNzIyMzc5OGY=, ActorId: [3:7592678459244278432:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs982h6j0fr7sr0m4ycb3m, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] Test command err: Trying to start YDB, gRPC: 10588, MsgBus: 15260 2026-01-07T17:49:44.273233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678336374559323:2219];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:44.273307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:44.507472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:44.507621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:44.569117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:44.583322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:44.584913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:44.709665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:44.709684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:44.709711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:44.709819Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:44.817055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:45.219001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:45.225893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:45.277340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:47.381852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678349259461896:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.381949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678349259461877:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.382169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.384358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678349259461917:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.384453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.386462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:47.399518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678349259461916:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:47.532806Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678349259461969:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:47.867938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.013565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.038733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.642585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678336374559323:2219];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:49.679843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20122, MsgBus: 9406 2026-01-07T17:49:51.814631Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678367995454720:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:51.814683Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:51.823916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:51.888022Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:51.889744Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678367995454693:2081] 1767808191813807 != 1767808191813810 2026-01-07T17:49:51.910713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:51.910779Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:51.921611Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-07T17:49:51.924872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:52.036614Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:52.036639Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:52.036645Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:52.036719Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:52.130193Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:52.473998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:52.489080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:52.825040Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:55.203715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678385175324734:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.203919Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.204168Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678385175324750:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.206291Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678385175324773:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.206356Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:55.207878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is ... ult=not_found; 2026-01-07T17:50:11.338147Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.338158Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.339095Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.339154Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.339169Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.345873Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.345947Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.345962Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.346661Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.346718Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.346738Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.353700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.353752Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.353767Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.354119Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.354174Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.354192Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361395Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361455Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361471Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361616Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361680Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.361700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369264Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369321Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369331Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369347Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369365Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.369379Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377690Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377696Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377705Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377742Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.377755Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386409Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386474Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386490Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386859Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386916Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.386935Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395229Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395293Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395310Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395435Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395482Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.395503Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] Test command err: Trying to start YDB, gRPC: 22908, MsgBus: 2116 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:48.009308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:48.138344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:48.156721Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:48.157452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:48.157521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:48.451243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:48.451406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:48.529707Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808185077211 != 1767808185077215 2026-01-07T17:49:48.542046Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:48.599585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:48.755559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:49.142134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:49.142189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:49.142225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:49.142737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:49.154961Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:49.473253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:49.545163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.545338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.545462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.546261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2786], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.546354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:49.551681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:49.654437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:926:2785], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:49.719343Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:50.010816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:50.278065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:52.775112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 18462, MsgBus: 16177 ... waiting for SysViewsRoster update finished 2026-01-07T17:50:00.263232Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:00.269484Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:00.273330Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:00.273625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:00.273775Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:00.545181Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:00.545311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:00.574842Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767808197106973 != 1767808197106977 2026-01-07T17:50:00.578446Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:00.623508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:00.779439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:01.109538Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:01.109596Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:01.109637Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:01.109924Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:01.121119Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:01.465447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:01.509623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:01.509740Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:01.509821Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:01.510870Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:01.511230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:01.515616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:01.610805Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:01.645302Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:01.712876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:01.924466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:04.194268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 18268, MsgBus: 19818 2026-01-07T17:50:08.724627Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:08.749140Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:08.809532Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592678441155169475:2081] 1767808208591066 != 1767808208591069 2026-01-07T17:50:08.839518Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:08.839623Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:08.840189Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:08.845512Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:08.972780Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:08.972803Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:08.972812Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:08.972911Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:08.993385Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:09.542582Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:09.552759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:09.670010Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:12.131700Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678458335039529:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.131708Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678458335039546:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.131807Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.132043Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678458335039558:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.132097Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.135394Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:12.142968Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678458335039557:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:12.224677Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678458335039610:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:12.276142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.354930Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.236759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.340401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.852255Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678466924982296:2961], SessionActorId: [3:7592678466924982153:2961], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 . sessionActorId=[3:7592678466924982153:2961]. 2026-01-07T17:50:14.853223Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=M2IzODY3NTgtNTA4NzJjMWItYjdiYzE5Y2QtNGY5YTQ0Y2I=, ActorId: [3:7592678466924982153:2961], ActorState: ExecuteState, LegacyTraceId: 01kecs99qgcfb0q35x47b51hkt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678466924982297:2961] from: [3:7592678466924982296:2961] trace_id# 2026-01-07T17:50:14.853401Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678466924982297:2961] TxId: 281474976710666. Ctx: { TraceId: 01kecs99qgcfb0q35x47b51hkt, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2IzODY3NTgtNTA4NzJjMWItYjdiYzE5Y2QtNGY5YTQ0Y2I=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 } trace_id# 2026-01-07T17:50:14.853722Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2IzODY3NTgtNTA4NzJjMWItYjdiYzE5Y2QtNGY5YTQ0Y2I=, ActorId: [3:7592678466924982153:2961], ActorState: ExecuteState, LegacyTraceId: 01kecs99qgcfb0q35x47b51hkt, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`." issue_code: 2001 severity: 1 } trace_id# |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShard [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> KqpSinkTx::OlapInteractive [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 12981, MsgBus: 25233 2026-01-07T17:49:26.395893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678259868566171:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.395991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.851088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.851214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.947271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.960881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678259868566011:2081] 1767808166384570 != 1767808166384573 2026-01-07T17:49:26.962558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.970064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.174377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.174396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.174406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.174470Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.231293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.397104Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.877453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.881970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.703879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272753468791:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.703884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272753468802:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.703967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.704137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272753468806:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.704169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.706648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.715251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272753468805:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.793816Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272753468858:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.121004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.299937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.300163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.300378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.300485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.300591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.300699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.300788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.300922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.301322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.301424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.301543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.301665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.301777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7592678277048436337:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:30.325574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.325715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.325911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.326015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.326117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.326212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.326310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.326440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7592678277048436328:2342];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.326561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... 
p:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.606821Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.613209Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.613279Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.613298Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.614712Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.614751Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.614767Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623340Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623413Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623432Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623789Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623832Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.623848Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.632496Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.632562Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.632581Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.633336Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.633391Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.633407Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.646512Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.646586Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.646605Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:11.674931Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs8t0n70tp8pdy3dejw1nv", SessionId: ydb://session/3?node_id=4&id=NTk5ZWQ4ZDUtOGM0Y2JiNC1mMzVlY2U1My0xNWVkN2I2Yg==, Slow query, duration: 10.137102s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:13.034754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:13.034789Z node 4 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:13.639905Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[4:7592678412763719609:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715671; 2026-01-07T17:50:13.640956Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7592678464303336551:3559], SessionActorId: [4:7592678455713401331:3559], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7592678464303336551:3559].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-07T17:50:13.641091Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592678464303336551:3559], SessionActorId: [4:7592678455713401331:3559], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7592678455713401331:3559]. 2026-01-07T17:50:13.641254Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=NDFiNGU3MDUtZDFlYWQwMWUtOGM1ZDIxZWYtNDQ4MWU1Zjg=, ActorId: [4:7592678455713401331:3559], ActorState: ExecuteState, LegacyTraceId: 01kecs98508fy0exrasqj48z5k, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678464303336680:3559] from: [4:7592678464303336551:3559] trace_id# 2026-01-07T17:50:13.641343Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678464303336680:3559] TxId: 281474976715671. Ctx: { TraceId: 01kecs98508fy0exrasqj48z5k, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NDFiNGU3MDUtZDFlYWQwMWUtOGM1ZDIxZWYtNDQ4MWU1Zjg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-07T17:50:13.641570Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NDFiNGU3MDUtZDFlYWQwMWUtOGM1ZDIxZWYtNDQ4MWU1Zjg=, ActorId: [4:7592678455713401331:3559], ActorState: ExecuteState, LegacyTraceId: 01kecs98508fy0exrasqj48z5k, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2026-01-07T17:50:13.649506Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.649687Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650009Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650132Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650352Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650489Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650812Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.650848Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:13.651602Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace-IsOlap >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2026-01-07T17:50:09.613144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:50:09.649300Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:50:09.649541Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:50:09.681955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:50:09.682263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:50:09.682511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:50:09.682661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:50:09.682767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:50:09.682896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:50:09.683037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:50:09.683274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:50:09.683395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:50:09.683508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.683655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:50:09.683786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:50:09.683891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:50:09.729336Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:50:09.729550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:50:09.729609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:50:09.729814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.729969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:50:09.730048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:50:09.730091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:50:09.730208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:50:09.730321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:50:09.730350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:50:09.730537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:50:09.730642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:50:09.730689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:50:09.730830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:50:09.730932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:50:09.730974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:50:09.731034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:50:09.731080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:50:09.731111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:50:09.731157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:50:09.731195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731224Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:50:09.731504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:50:09.731532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:50:09.731721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:50:09.731781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731822Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:50:09.731925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:50:09.731971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:50:09.732027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:50:09.732071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:50:09.732100Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:50:09.732227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:50:09.732272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-07T17:50:16.615484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:50:16.615826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:50:16.616047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.616222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.616397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.616662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:50:16.616868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.617076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.617486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1057:2924] finished for tablet 9437184 2026-01-07T17:50:16.618032Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.033},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.036}],"full":{"a":8096213,"name":"_full_task","f":8096213,"d_finished":0,"c":0,"l":8132731,"d":36518},"events":[{"name":"bootstrap","f":8096514,"d_finished":1793,"c":1,"l":8098307,"d":1793},{"a":8131809,"name":"ack","f":8129979,"d_finished":1624,"c":1,"l":8131603,"d":2546},{"a":8131785,"name":"processing","f":8098506,"d_finished":4714,"c":3,"l":8131606,"d":5660},{"name":"ProduceResults","f":8097756,"d_finished":2859,"c":6,"l":8132284,"d":2859},{"a":8132291,"name":"Finish","f":8132291,"d_finished":0,"c":0,"l":8132731,"d":440},{"name":"task_result","f":8098526,"d_finished":3025,"c":2,"l":8129736,"d":3025}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.618122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:50:16.618631Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.033},{"events":["l_ProduceResults","f_Finish"],"t":0.036},{"events":["l_ack","l_processing","l_Finish"],"t":0.037}],"full":{"a":8096213,"name":"_full_task","f":8096213,"d_finished":0,"c":0,"l":8133345,"d":37132},"events":[{"name":"bootstrap","f":8096514,"d_finished":1793,"c":1,"l":8098307,"d":1793},{"a":8131809,"name":"ack","f":8129979,"d_finished":1624,"c":1,"l":8131603,"d":3160},{"a":8131785,"name":"processing","f":8098506,"d_finished":4714,"c":3,"l":8131606,"d":6274},{"name":"ProduceResults","f":8097756,"d_finished":2859,"c":6,"l":8132284,"d":2859},{"a":8132291,"name":"Finish","f":8132291,"d_finished":0,"c":0,"l":8133345,"d":1054},{"name":"task_result","f":8098526,"d_finished":3025,"c":2,"l":8129736,"d":3025}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:50:16.618732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:50:16.577248Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-07T17:50:16.618778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:50:16.619033Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] |94.4%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.513694Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678434053858194:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.513805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.555913Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.569885Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678430662238292:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.573061Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.595740Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.822354Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.844745Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.013356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.032740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.148987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.149105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.158648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.161986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.278465Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.278644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.284108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.373453Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.378550Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.509077Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.524134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.587518Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.629602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0015dd/r3tmp/yandexN1eESn.tmp 2026-01-07T17:50:07.629631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0015dd/r3tmp/yandexN1eESn.tmp 2026-01-07T17:50:07.629787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0015dd/r3tmp/yandexN1eESn.tmp 2026-01-07T17:50:07.629875Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.884904Z INFO: TTestServer started on Port 19179 GrpcPort 15227 PQClient connected to localhost:15227 === TenantModeEnabled() = 1 === Init PQ - start server on port 15227 2026-01-07T17:50:08.492757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.492980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.493173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:50:08.493205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.493390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:08.493441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.505602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 
281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.505698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.505834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.505860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.505875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.505891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 2026-01-07T17:50:08.518731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.518783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.518828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2026-01-07T17:50:08.521377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.521405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2026-01-07T17:50:08.521436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.522345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.522376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.522396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.522436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:50:08.529141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.531888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 
72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2026-01-07T17:50:08.532005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.537013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208579, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.537172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808208579 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.537197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.537490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 128 -> 240 2026-01-07T17:50:08.537539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.537690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.537 ... : writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2026-01-07T17:50:17.381209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976710665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:34484" , at schemeshard: 72057594046644480 2026-01-07T17:50:17.381362Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.381464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-07T17:50:17.381479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.381588Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.381618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:17.381685Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-07T17:50:17.381695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.381711Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-07T17:50:17.381719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.381751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-07T17:50:17.381787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2026-01-07T17:50:17.381802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-07T17:50:17.381812Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.381823Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710665:0 2026-01-07T17:50:17.381834Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2026-01-07T17:50:17.381844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-07T17:50:17.383390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:17.383672Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2026-01-07T17:50:17.383820Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:17.383859Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.384070Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:50:17.384093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7592678467141833628:2383], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 46 2026-01-07T17:50:17.384508Z node 
3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.384577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.384589Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2026-01-07T17:50:17.384602Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-07T17:50:17.384615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-07T17:50:17.384689Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 2026-01-07T17:50:17.386148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.387736Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-07T17:50:17.387758Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-07T17:50:17.388109Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2026-01-07T17:50:17.388207Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:34476 2026-01-07T17:50:17.388226Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:34476 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-07T17:50:17.388233Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:50:17.389149Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-07T17:50:17.389314Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:50:17.389334Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, 
Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:50:17.389342Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:50:17.389375Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592678480026736808:2372] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-07T17:50:17.389399Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:50:17.389833Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-07T17:50:17.389933Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2026-01-07T17:50:17.390269Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 2026-01-07T17:50:17.391158Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-07T17:50:17.391427Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1365: updating token 2026-01-07T17:50:17.391459Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:50:17.392009Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 describe result for acl check 2026-01-07T17:50:17.392088Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 2026-01-07T17:50:17.392329Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group|2ecf8a17-701a8242-8bc26d6-70c8b9f3_0 is DEAD 2026-01-07T17:50:17.392545Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteKqpUpsert2 >> TPQTest::TestUserInfoCompatibility >> ReadLoad::ShouldReadIterate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 17481, MsgBus: 10027 2026-01-07T17:49:26.516739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678259816106839:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.516832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.903623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:27.034411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:27.034545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.037064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678259816106713:2081] 1767808166511084 != 1767808166511087 2026-01-07T17:49:27.041921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.063248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.190943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.201662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.201699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.201717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.201811Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.535585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.882594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.890099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.714906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272701009471:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.714913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272701009495:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.715019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.715236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272701009501:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.715324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.718560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.728068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272701009500:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.861338Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272701009553:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.239384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.342214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.156154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678259816106839:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.254426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:41.826961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:49:41.826986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:45.723059Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7592678341420494877:2962], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs8dbp5kxqbpmzfcxkxaqm. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjMzN2Q5YzAtZjA3NGZmZTYtNjIxMWFhZTEtMzlkMzIxM2I=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1767808173000/18446744073709551615 shard 72075186224037889 with lowWatermark v1767808173215/18446744073709551615 (node# 1 state# Ready) } } 2026-01-07T17:49:45.723547Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592678341420494877:2962], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs8dbp5kxqbpmzfcxkxaqm. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjMzN2Q5YzAtZjA3NGZmZTYtNjIxMWFhZTEtMzlkMzIxM2I=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1767808173000/18446744073709551615 shard 72075186224037889 with lowWatermark v1767808173215/18446744073709551615 (node# 1 state# Ready) } }. 2026-01-07T17:49:45.724213Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjMzN2Q5YzAtZjA3NGZmZTYtNjIxMWFhZTEtMzlkMzIxM2I=, ActorId: [1:7592678285585919333:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs8dbp5kxqbpmzfcxkxaqm, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 43 has no snapshot at v1767808173000/18446744073709551615 shard 72075186224037889 with lowWatermark v1767808173215/18446744073709551615 (node# 1 state# Ready)" severity: 1 } } trace_id# Trying to start YDB, gRPC: 28664, MsgBus: 28575 2026-01-07T17:49:46.976222Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678344747027630:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:46.976546Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:46.992873Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:47.079466Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:47.081715Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678344747027527:2081] 1767808186959094 != 1767808186959097 2026-01-07T17:49:47.118578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:47.118662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:47.124593Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:47.184548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:47.189370Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:47.189388Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:47.189395Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:47.189459Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:47.590263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, ... rkloadService] [TPoolFetcherActor] ActorId: [3:7592678424329398139:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.386699Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.386932Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678424329398146:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.387032Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:04.389383Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:04.397392Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678424329398145:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:04.479926Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678424329398198:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:04.533753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:04.569743Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:05.573926Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:06.353373Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678407149528090:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.462525Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:07.471507Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YWY4Mzg1YWYtZjg3YzI3NjItZGE0MjE3NmMtNWY3ZjAwZjg=, ActorId: [3:7592678432919340703:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs92hhb2akdb0gbwdv4gvn, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 28412, MsgBus: 5406 2026-01-07T17:50:08.938420Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678442967960105:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:08.938474Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:09.045268Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:09.145320Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678442967959931:2081] 1767808208907531 != 1767808208907534 2026-01-07T17:50:09.160047Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:09.173496Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:09.173601Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:09.177285Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:09.338064Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:09.413543Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:09.413567Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:09.413576Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:09.413656Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:09.902014Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:10.078111Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:10.085554Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:13.310845Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678464442797293:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.310853Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678464442797308:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.310948Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.311225Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678464442797316:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.311283Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.314398Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:13.323672Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678464442797315:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:13.412228Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678464442797368:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:13.472609Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.526210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.389736Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678442967960105:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:14.465019Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:14.767583Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.182857Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=MjRhNDAwMDctMzE1YjcxMTQtZmYyNGQyYmItNTY3MzZkZjE=, ActorId: [4:7592678477327707290:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs9c082snswct7dn77fsp6, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.457144Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678431063424362:2099];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.458331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.569943Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.646279Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678430664277344:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.646323Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.692651Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.951123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.989829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.123146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.177438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.177551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.179348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.183938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.262979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.270144Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.271174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.341011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.345037Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.464866Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.575689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00176d/r3tmp/yandexDclPhb.tmp 2026-01-07T17:50:07.575712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00176d/r3tmp/yandexDclPhb.tmp 2026-01-07T17:50:07.581094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00176d/r3tmp/yandexDclPhb.tmp 2026-01-07T17:50:07.581221Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.655343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.675329Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.884357Z INFO: TTestServer started on Port 5278 GrpcPort 2926 PQClient connected to localhost:2926 === TenantModeEnabled() = 1 === Init PQ - start server on port 2926 2026-01-07T17:50:08.385413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.385636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.385816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:50:08.385840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.386016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:08.386059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.393811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.393962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.394135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.394186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.394205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.394218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-07T17:50:08.397615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.397653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.397669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-07T17:50:08.401623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.401661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.401684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.401703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-07T17:50:08.406311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.406543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.406558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-07T17:50:08.406572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.409566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-07T17:50:08.409676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 
4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.413883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208453, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.414061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808208453 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.414109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.414341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-07T17:50:08.414381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.414496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.414543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:50:08.421690Z node 1 :FLAT ... ad_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2026-01-07T17:50:17.828591Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 auth for : consumer_aba 2026-01-07T17:50:17.829116Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 Handle describe topics response 2026-01-07T17:50:17.829207Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 auth is DEAD 2026-01-07T17:50:17.829286Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 auth ok: topics# 1, initDone# 0 2026-01-07T17:50:17.830595Z :INFO: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] [null] Server session id: consumer_aba_3_2_8206567841760066938_v1 2026-01-07T17:50:17.830806Z :DEBUG: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:50:17.831436Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 register session: topic# /Root/account1/write_topic 2026-01-07T17:50:17.831572Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 grpc read done: success# 1, data# { read { } } 2026-01-07T17:50:17.831710Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 got read request: guid# 8c06c3ec-b9610cb7-f6988b75-94857080 
2026-01-07T17:50:17.832079Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][write_topic] pipe [3:7592678478333479210:2384] connected; active server actors: 1 2026-01-07T17:50:17.832403Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7592678478333479210:2384] session consumer_aba_3_2_8206567841760066938_v1 2026-01-07T17:50:17.832466Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2026-01-07T17:50:17.832528Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2026-01-07T17:50:17.832569Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_8206567841760066938_v1" (Sender=[3:7592678478333479207:2384], Pipe=[3:7592678478333479210:2384], Partitions=[], ActiveFamilyCount=0) 2026-01-07T17:50:17.832602Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2026-01-07T17:50:17.832658Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-07T17:50:17.832722Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_8206567841760066938_v1" (Sender=[3:7592678478333479207:2384], Pipe=[3:7592678478333479210:2384], Partitions=[], ActiveFamilyCount=0) 2026-01-07T17:50:17.832806Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_8206567841760066938_v1" sender [3:7592678478333479207:2384] lock partition 0 for ReadingSession "consumer_aba_3_2_8206567841760066938_v1" (Sender=[3:7592678478333479207:2384], Pipe=[3:7592678478333479210:2384], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-07T17:50:17.832878Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-07T17:50:17.832910Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000225s 2026-01-07T17:50:17.835747Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_8206567841760066938_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7592678478333479210 RawX2: 4503612512274768 } Path: "/Root/account1/write_topic" } 2026-01-07T17:50:17.835866Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2026-01-07T17:50:17.836630Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7592678478333479212:2387] 2026-01-07T17:50:17.836696Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_8206567841760066938_v1:1 with generation 1 2026-01-07T17:50:17.850726Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1767808217704 CreateTimestampMS: 1767808217702 SizeLag: 165 WriteTimestampEstimateMS: 1767808217704 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-07T17:50:17.851465Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2026-01-07T17:50:17.851609Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2026-01-07T17:50:17.852516Z :INFO: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] Closing read session. Close timeout: 0.000000s 2026-01-07T17:50:17.852582Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2026-01-07T17:50:17.852639Z :INFO: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] Counters: { Errors: 0 CurrentSessionLifetimeMs: 28 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:50:17.852732Z :NOTICE: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T17:50:17.852764Z :DEBUG: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] [null] Abort session to cluster 2026-01-07T17:50:17.853521Z :NOTICE: [/Root] [/Root] [fcda37b7-b7d7ae79-405dd7be-1d1323ca] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:50:17.853585Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 grpc read done: success# 0, data# { } 2026-01-07T17:50:17.853615Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 grpc read failed 2026-01-07T17:50:17.853642Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 closed 2026-01-07T17:50:17.854066Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8206567841760066938_v1 is DEAD 2026-01-07T17:50:17.854492Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_8206567841760066938_v1 2026-01-07T17:50:17.854653Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][write_topic] pipe [3:7592678478333479210:2384] disconnected. 2026-01-07T17:50:17.854700Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][write_topic] pipe [3:7592678478333479210:2384] disconnected; active server actors: 1 2026-01-07T17:50:17.854797Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][write_topic] pipe [3:7592678478333479210:2384] client consumer_aba disconnected session consumer_aba_3_2_8206567841760066938_v1 2026-01-07T17:50:18.265678Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592678482628446526:2391], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:50:18.266049Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MmY3NzUwMzQtOWVjOWVhMWQtNDI4MDg1ZGMtY2ZlMWIyMzU=, ActorId: [3:7592678482628446524:2390], ActorState: ExecuteState, LegacyTraceId: 01kecs9d4444hkgejrr273pfbr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:50:18.266342Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 21784, MsgBus: 13416 2026-01-07T17:49:26.386171Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678261734952368:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.386227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.736049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.736172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.784121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.801832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.850041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678261734952331:2081] 1767808166384615 != 1767808166384618 2026-01-07T17:49:26.863153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.065450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.170126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.170170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.170186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.170250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.394031Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.923078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.933081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.411627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274619855107:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.411733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.412112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274619855116:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.412236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.413620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274619855121:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.420094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.433957Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678274619855123:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.529298Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678274619855174:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.120380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.265551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.265872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.266025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.266076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.266113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.266335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.266340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.266461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.266463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.266546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.266574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.266669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.266699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.266787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.266807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.266868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.266933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.266967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.267070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.267082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678278914822664:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.267162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678278914822663:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.267163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: ... 
e;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.673430Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.673485Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.673502Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.678041Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.678122Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.678142Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.682665Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.682721Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.682740Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.687928Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.687987Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.688006Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.691296Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.691355Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.691374Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.697407Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.697467Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.697488Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.700207Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.700264Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.700283Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.707484Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.707554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.707575Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.710237Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.710294Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.710311Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.719631Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.719710Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.719733Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.725205Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.725512Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.725540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.730641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.730704Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.730725Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.735523Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.735588Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.735610Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:13.767337Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs8vvgdpn63ftp8ykybkrc", SessionId: ydb://session/3?node_id=3&id=ZTU1ZThiMWItNWEwMmI0YTMtMWNiNDRlYTQtZjUwODZmMDM=, Slow query, duration: 10.054664s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n 
);\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:14.869384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:14.869412Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> UpsertLoad::ShouldWriteKqpUpsert |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.436678Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678434266481314:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.436740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.556938Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.577451Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678434605484960:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.577512Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.645924Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.851020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.908591Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.107128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.107273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.110344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.110398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2026-01-07T17:50:07.129042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.173871Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.175498Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.215501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.224252Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.229956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.445497Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.574985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00176e/r3tmp/yandexKVdemx.tmp 2026-01-07T17:50:07.575015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00176e/r3tmp/yandexKVdemx.tmp 2026-01-07T17:50:07.581103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00176e/r3tmp/yandexKVdemx.tmp 2026-01-07T17:50:07.581223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.613092Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.638559Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.645576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.884427Z INFO: TTestServer started on Port 27724 GrpcPort 16410 PQClient connected to localhost:16410 === TenantModeEnabled() = 1 === Init PQ - start server on port 16410 2026-01-07T17:50:08.489540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.493339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.493526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:50:08.493547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.493707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:08.493780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.497827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.498006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.498173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.498223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.498252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.498265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-07T17:50:08.504192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.504230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.504259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-07T17:50:08.509847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.509888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.509940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.509981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-07T17:50:08.518019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.518320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.518332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-07T17:50:08.518361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.521957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-07T17:50:08.522064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.526143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208572, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.526309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808208572 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.526353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.526636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-07T17:50:08.526667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.526830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.526 ... 
InitializePQ completed BEFORE MODIFY PERMISSIONS 2026-01-07T17:50:17.933880Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976710665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:52592" , at schemeshard: 72057594046644480 2026-01-07T17:50:17.934080Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.934286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-07T17:50:17.934299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.934458Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.934495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:17.934571Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-07T17:50:17.934588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.934604Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-07T17:50:17.934615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.934644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-07T17:50:17.934683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2026-01-07T17:50:17.934706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-07T17:50:17.934723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-07T17:50:17.934745Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710665:0 2026-01-07T17:50:17.934756Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2026-01-07T17:50:17.934767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-07T17:50:17.936592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:17.937033Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2026-01-07T17:50:17.937214Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:17.937234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.937471Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:50:17.937493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7592678464814925778:2379], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 46 2026-01-07T17:50:17.937984Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.938057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.938073Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2026-01-07T17:50:17.938096Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-07T17:50:17.938116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-07T17:50:17.938215Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, 
subscribers: 0 2026-01-07T17:50:17.939861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2026-01-07T17:50:17.940460Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-07T17:50:17.940484Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-07T17:50:17.940789Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2026-01-07T17:50:17.940877Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:52572 2026-01-07T17:50:17.940896Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:52572 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-07T17:50:17.940905Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:50:17.941790Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-07T17:50:17.942027Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-07T17:50:17.942046Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-07T17:50:17.942054Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:50:17.942086Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592678477699828935:2374] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-07T17:50:17.942108Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:50:17.942496Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-07T17:50:17.942616Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-01-07T17:50:17.943046Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 2026-01-07T17:50:17.944043Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-07T17:50:17.944227Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-07T17:50:17.944268Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 2026-01-07T17:50:17.944461Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|1b3dff2a-40270f7a-2191766f-fbf2865a_0 is DEAD 2026-01-07T17:50:17.944782Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26707, MsgBus: 7444 2026-01-07T17:49:26.454782Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678261169263472:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.454830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.552877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:27.003062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:27.003179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:27.022365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.095734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:27.097028Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678261169263452:2081] 1767808166445300 != 1767808166445303 2026-01-07T17:49:27.097701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.175327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.175347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.175359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.175415Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.369855Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.501123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.865118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:29.688558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274054166232:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.688568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274054166224:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.688687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.688979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678274054166239:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.689053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.692281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.702878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678274054166238:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.819017Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678274054166291:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.230446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.294743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.761931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678261169263472:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.097795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27167, MsgBus: 19423 2026-01-07T17:49:34.368061Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678295273035966:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.368198Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.400357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.584728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.586983Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.587755Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.634149Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.780919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-07T17:49:34.787245Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:34.787266Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:34.787274Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:34.787346Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:35.314684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.376372Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:37.942191Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678308157938667:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.942244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678308157938682:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.942301Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.943758Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678308157938698:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.943816Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:37.946969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:37.967978Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592 ... e 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.552193Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.552284Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.552321Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.559900Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.559976Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.560011Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.567798Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.567894Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.567924Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.575442Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.575511Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.575537Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.582705Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.582815Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.582861Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.591133Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.591214Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.591249Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.599872Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.599965Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.600003Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.608178Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.608278Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.608316Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.616873Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.617001Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.617045Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.729690Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs8pvn5yxg60a6vyag3hmf", SessionId: ydb://session/3?node_id=3&id=ZThlNzNmZWItMjQ5ODZhODctNjYzNzk4YTEtM2VmNmQ5ZGU=, Slow query, duration: 19.278291s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2026-01-07T17:50:16.806485Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:80;broken_lock_id=281474976715665; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2026-01-07T17:50:16.894668Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [3:9746:9119], TxId: 281474976715665, task: 2. Ctx: { CheckpointId : . TraceId : 01kecs9b1wa3956apq3kf53nbp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=3&id=M2ViOTlmZGMtYzllMTgyZjgtYjNkN2U4YWItMTdhNmYyNDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2026-01-07T17:50:16.895640Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:9746:9119], TxId: 281474976715665, task: 2. Ctx: { CheckpointId : . TraceId : 01kecs9b1wa3956apq3kf53nbp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2ViOTlmZGMtYzllMTgyZjgtYjNkN2U4YWItMTdhNmYyNDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 }. 2026-01-07T17:50:16.898366Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2ViOTlmZGMtYzllMTgyZjgtYjNkN2U4YWItMTdhNmYyNDE=, ActorId: [3:9517:8895], ActorState: ExecuteState, LegacyTraceId: 01kecs9b1wa3956apq3kf53nbp, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=14;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=15;stage=FO::FETCHING; |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.500455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678430729792041:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.500503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.621354Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678432303368722:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.621397Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.707019Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.707017Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:07.022078Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.032485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.067081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.067229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.079906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.080128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2026-01-07T17:50:07.107940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.121065Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.123319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.170958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.190093Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.335171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.540077Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.577397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00176c/r3tmp/yandexQ2GoIx.tmp 2026-01-07T17:50:07.577428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00176c/r3tmp/yandexQ2GoIx.tmp 2026-01-07T17:50:07.577619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00176c/r3tmp/yandexQ2GoIx.tmp 2026-01-07T17:50:07.577756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.667038Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.887610Z INFO: TTestServer started on Port 1513 GrpcPort 26693 PQClient connected to localhost:26693 === TenantModeEnabled() = 1 === Init PQ - start server on port 26693 2026-01-07T17:50:08.401748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.401973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.402153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:50:08.402187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.402380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 
2026-01-07T17:50:08.402462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.405505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.405651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.405802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.405835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.405852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.405876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-07T17:50:08.409758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.409797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.409820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-07T17:50:08.411944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.411969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.411985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.412006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-07T17:50:08.416070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.419644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 
0:281474976715657 msg type: 269090816 2026-01-07T17:50:08.419745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.421870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208467, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.422044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808208467 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.422072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.422339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-07T17:50:08.422381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.422531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.422584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:50:08.424097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:08.424127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:50:08.424294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:50:08.424330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592678435024759959:2395], at schemeshard: 72057594046644480, txId: 281474976715657, path i ... ash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-07T17:50:17.613037Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7592678481881373716:2373] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-07T17:50:17.613058Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-07T17:50:17.613588Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 ===Assert streaming op1 2026-01-07T17:50:17.613794Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-01-07T17:50:17.614138Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 ===Assert streaming op2 2026-01-07T17:50:17.615200Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 grpc read done: success: 1 data: write_request[data omitted] 2026-01-07T17:50:17.615504Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2026-01-07T17:50:17.615718Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2026-01-07T17:50:17.674529Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2026-01-07T17:50:17.682400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:45170" , at schemeshard: 72057594046644480 2026-01-07T17:50:17.682563Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.682649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-07T17:50:17.682656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.682751Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.682769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:17.682830Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715666:0 progress is 1/1 2026-01-07T17:50:17.682838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 
2026-01-07T17:50:17.682850Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715666:0 progress is 1/1 2026-01-07T17:50:17.682856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2026-01-07T17:50:17.682882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-07T17:50:17.682907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715666, ready parts: 1/1, is published: false 2026-01-07T17:50:17.682917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-07T17:50:17.682924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2026-01-07T17:50:17.682933Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2026-01-07T17:50:17.682941Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2026-01-07T17:50:17.682948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 46], 4 2026-01-07T17:50:17.684623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:17.684898Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2026-01-07T17:50:17.685066Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:17.685086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-07T17:50:17.685228Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:50:17.685249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7592678468996470536:2388], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 46 2026-01-07T17:50:17.685723Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2026-01-07T17:50:17.685803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2026-01-07T17:50:17.685816Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2026-01-07T17:50:17.685829Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 4 2026-01-07T17:50:17.685843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-07T17:50:17.685910Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 2026-01-07T17:50:17.687275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 ===Wait for session created with token with removed ACE to die2026-01-07T17:50:18.376317Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7592678486176341049:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:50:18.376803Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NTI1ZjEwNmYtNTQ5ZmY3ZmEtODg0OTFhMjQtODgyNjhkZDM=, ActorId: [3:7592678486176341042:2378], ActorState: ExecuteState, LegacyTraceId: 01kecs9d7f1yam6wc9gr5aztj3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:50:18.377225Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:50:18.611965Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-07T17:50:18.612941Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 describe result for acl check 2026-01-07T17:50:18.613071Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 2026-01-07T17:50:18.613341Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|f6021fc4-2ab65817-9f185564-b4656ff0_0 is DEAD 2026-01-07T17:50:18.613659Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 25464, MsgBus: 22473 2026-01-07T17:49:26.383881Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262681855487:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.383965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.657434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.795579Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678262681855454:2081] 1767808166382050 != 1767808166382053 2026-01-07T17:49:26.797114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.805541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.806000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.809515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.885119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.173457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.173475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.173484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.173550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.397095Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.896992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.902981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.588582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275566758238:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.589070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275566758226:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.589146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.589727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275566758245:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.589817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.593771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.607535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275566758244:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.697288Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275566758297:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.119625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.271809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.271809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.272041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.272238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.272331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.272406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.272529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.272535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.272640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.272646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.272724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.272768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.272830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.272906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.272970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.273039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.273051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.273168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.273177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.273258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678279861725754:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.273280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678279861725758:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.273355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_ ... 
:50:18.555582Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555593Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555604Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555613Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555624Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555633Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555643Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555651Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555661Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555672Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555681Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555692Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555702Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555712Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555721Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555731Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555739Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555749Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: 
SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555758Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555767Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555778Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555789Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555799Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555809Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555818Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555829Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555838Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555847Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555857Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555867Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.555929Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ODViOTZjMWItMjRhNzhjODEtYWY3MzMwN2MtNzkzOWFlMDI=, ActorId: [4:7592678482069723054:2967], ActorState: ExecuteState, LegacyTraceId: 01kecs9dcefkxn9whh6z40r9ev, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678482069723620:2967] from: [4:7592678482069723307:2967] trace_id# 2026-01-07T17:50:18.556059Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678482069723620:2967] TxId: 281474976710669. Ctx: { TraceId: 01kecs9dcefkxn9whh6z40r9ev, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ODViOTZjMWItMjRhNzhjODEtYWY3MzMwN2MtNzkzOWFlMDI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:50:18.556273Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ODViOTZjMWItMjRhNzhjODEtYWY3MzMwN2MtNzkzOWFlMDI=, ActorId: [4:7592678482069723054:2967], ActorState: ExecuteState, LegacyTraceId: 01kecs9dcefkxn9whh6z40r9ev, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:50:18.557703Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557722Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557732Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557742Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557752Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557762Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557772Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557781Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557817Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557828Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557838Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557848Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557857Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557867Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557875Z node 4 
:KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557884Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557894Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557903Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557913Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557921Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:18.557930Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7592678482069723307:2967], SessionActorId: [4:7592678482069723054:2967], StateRollback: unknown message 278003713 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 17959, MsgBus: 10557 2026-01-07T17:49:26.391375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678259408604234:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.391414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.720465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.731117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.731230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.830347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678259408604210:2081] 1767808166388662 != 1767808166388665 2026-01-07T17:49:26.837533Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.839337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.931894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.171510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.171534Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.171552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.171634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.404036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.919002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.925417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.638917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272293506996:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.638922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272293506985:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.638984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.639185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678272293507000:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.639213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.641948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.652063Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678272293506999:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.791520Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678272293507052:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.254157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.397036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678259408604234:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.397145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:31.419558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.508103Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678289473384253:2963], SessionActorId: [1:7592678285178416906:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7592678285178416906:2963]. 2026-01-07T17:49:33.508350Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NjczYzYyODctYjRhZDA3YTAtM2I5M2JmMzUtNzliMTIyZTI=, ActorId: [1:7592678285178416906:2963], ActorState: ExecuteState, LegacyTraceId: 01kecs81bncrr6bpp3bz838gtt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7592678289473384254:2963] from: [1:7592678289473384253:2963] trace_id# 2026-01-07T17:49:33.508442Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678289473384254:2963] TxId: 281474976710665. Ctx: { TraceId: 01kecs81bncrr6bpp3bz838gtt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjczYzYyODctYjRhZDA3YTAtM2I5M2JmMzUtNzliMTIyZTI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-07T17:49:33.508672Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NjczYzYyODctYjRhZDA3YTAtM2I5M2JmMzUtNzliMTIyZTI=, ActorId: [1:7592678285178416906:2963], ActorState: ExecuteState, LegacyTraceId: 01kecs81bncrr6bpp3bz838gtt, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 23203, MsgBus: 24838 2026-01-07T17:49:34.951245Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678296946795507:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.997314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:35.020733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:35.111373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:35.111458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:35.135007Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:35.139958Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678296946795327:2081] 1767808174914236 != 1767808174914239 2026-01-07T17:49:35.181411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:35.305713Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:35.353462Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:35.353483Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:35.353491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:35.353556Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:35.894006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:35.898982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.951691Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors: ... 
ecute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.053598Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.053668Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.053690Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.060419Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.060497Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.060520Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.063790Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.063860Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.063880Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.071792Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.071865Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.071886Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.075647Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.075701Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.075720Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.083105Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.083207Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.083231Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.085794Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.085860Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.085881Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095106Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095178Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095198Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095876Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.095948Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.105792Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.105854Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.105873Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.106884Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.106949Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.106967Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.115854Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.115928Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.115952Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.117177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.117241Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.117259Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:15.175584Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs8wjq99nytz69066s5hwb", SessionId: ydb://session/3?node_id=4&id=MmE3Mzg5YzgtNWRkNDY2LWFiYTVhYzczLWM3YjE2OTY2, Slow query, duration: 10.501967s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n 
CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:15.656192Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:15.656221Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 11220, MsgBus: 28006 2026-01-07T17:49:26.424492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678260588495041:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.424806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.530610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:26.952401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.953743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.953838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.956031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.964874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:27.156079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.169546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.169562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.169644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.169713Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.469025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:49:27.889251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.899115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.749305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273473397789:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.749328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273473397777:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.749441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.749707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678273473397792:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.749778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.752719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.762350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678273473397791:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.889470Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678273473397844:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.175709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.376714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.377149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.377385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.377519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.377604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.377743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.377828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.377956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.378050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.378145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.378254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.378368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.378480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678277768365318:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:30.382244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.382299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.382507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.382622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.382698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.382798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.382885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.382990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7592678277768365314:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.383088Z node 1 :T ... 
e;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.815143Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.815195Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.815213Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823138Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823217Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823234Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823376Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823425Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.823442Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.830824Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.830885Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.830905Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.832568Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.832624Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.832643Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.837812Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.837867Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.837885Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.842254Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.842323Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.842346Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.846602Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.846659Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.846681Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.852609Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.852711Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.852740Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.857122Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.857182Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.857201Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.862458Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.862525Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.862544Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.866779Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.866840Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.866860Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.872699Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.872760Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.872779Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:14.901347Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs8wqr28x7m7ss0ys07fkm", SessionId: ydb://session/3?node_id=3&id=Nzc2OTIwMjQtNzQ4YzFlZDgtOTdlMzA3ZmYtZmQxY2IxZDY=, Slow query, duration: 10.592844s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n 
);\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:15.838085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:15.838115Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] Test command err: Trying to start YDB, gRPC: 25862, MsgBus: 8942 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:30.963836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:31.074144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:31.091817Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:31.092298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:31.092357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:31.375973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:31.376096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:31.449384Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808168451136 != 1767808168451140 2026-01-07T17:49:31.463515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:31.509021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:31.643393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:32.016199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:32.016280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:32.016318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:32.016846Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:32.029750Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:32.393430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:32.402793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:32.457128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.713475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.094740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.409826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.268095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.268229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.270195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.270305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:34.303837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.509248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.765775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.080611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.351556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.629241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.939845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.260420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.638220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.638455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.638866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.639164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.639346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.647444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:36.826456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:36.888464Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2859:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:38.739178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... 12433Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:10.712507Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:10.984344Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:10.984469Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:11.019939Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767808206710192 != 1767808206710196 2026-01-07T17:50:11.024912Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:11.070758Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:11.176222Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:11.530770Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:11.530851Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:11.530896Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:11.531399Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:11.542989Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:12.025649Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:12.080454Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.332854Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.735654Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.026511Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.649754Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1906:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.650300Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.651096Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1979:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.651236Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:13.681381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:13.887935Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.209133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.487562Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:14.772909Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:15.093960Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:15.395888Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:15.707460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:16.138161Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2796:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.138302Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.138820Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.138923Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2803:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.138998Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.145106Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:16.282800Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2805:4186], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:16.356792Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2860:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:18.337505Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.593083Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.967548Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.444767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678434573996686:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.444891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.621083Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.723755Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.962089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.973013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.973135Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:07.190180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:50:07.190284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.195309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.195411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.244882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.245086Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.255380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.365461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.372413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.396069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.500088Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.661309Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.794051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0015e3/r3tmp/yandexixNAYo.tmp 2026-01-07T17:50:07.794082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0015e3/r3tmp/yandexixNAYo.tmp 2026-01-07T17:50:07.800193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0015e3/r3tmp/yandexixNAYo.tmp 2026-01-07T17:50:07.800280Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.892737Z INFO: TTestServer started on Port 62208 GrpcPort 7412 PQClient connected to localhost:7412 === TenantModeEnabled() = 1 === Init PQ - start server on port 7412 2026-01-07T17:50:08.515737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.515875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.516061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
2026-01-07T17:50:08.516077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.516215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:08.516248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.518401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.518535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.518720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.518756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.518774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.518784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 2026-01-07T17:50:08.521463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.521503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.521526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2026-01-07T17:50:08.523095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.523116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2026-01-07T17:50:08.523132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.523567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.523585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 
2026-01-07T17:50:08.523603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.523626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:50:08.527725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.530439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2026-01-07T17:50:08.530592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.532563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208579, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.532717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767808208579 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.532766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.533059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 128 -> 240 2026-01-07T17:50:08.533092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.533254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.533289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T17:50:08.538801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:08.538821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:50:08.539135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__p ... CacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2026-01-07T17:50:20.060550Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 2 count 1 size 431 actorID [4:7592678487536410923:2307] 2026-01-07T17:50:20.060646Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037893][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:50:20.060702Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037893][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:50:20.060735Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037893][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-07T17:50:20.060768Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037893][Partition][0][StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2026-01-07T17:50:20.060906Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.060945Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.060957Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.060974Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.060984Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.061014Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:50:20.061052Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2026-01-07T17:50:20.061184Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 431 2026-01-07T17:50:20.062266Z :DEBUG: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 13 } 2026-01-07T17:50:20.062309Z :DEBUG: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session: acknoledged message 1 2026-01-07T17:50:20.061500Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::IEventHandle 2026-01-07T17:50:20.130511Z :INFO: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session will now close 2026-01-07T17:50:20.130573Z :DEBUG: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session: aborting 2026-01-07T17:50:20.131033Z :INFO: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:50:20.131076Z :DEBUG: [] MessageGroupId [123] SessionId [123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0] Write session: destroy 2026-01-07T17:50:20.136277Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: 123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0 grpc closed 2026-01-07T17:50:20.136309Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: 123|85ab35b-4c9d72cc-f60cd4d9-cc500d06_0 is DEAD 2026-01-07T17:50:20.137012Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:50:20.135868Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.135912Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.135927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.135945Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.135970Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.136018Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.136027Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.136035Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.136054Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.136061Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.141336Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.141360Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.141375Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.141392Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.141405Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.149283Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037893] server disconnected, pipe [3:7592678494172818334:2401] destroyed 2026-01-07T17:50:20.149319Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037893][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:50:20.149345Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.149373Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.149384Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.149396Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.149403Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.238009Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.238044Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.238059Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.238077Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.238099Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.238154Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.238162Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.238169Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.238177Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.238183Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.242048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.242095Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.242111Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.242130Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.242146Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.338839Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.338877Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.338894Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.338909Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.338924Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.338998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.339008Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.339015Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.339025Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.339031Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.342473Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.342506Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.342520Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.342542Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:20.342555Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-07T17:50:06.449314Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678433449772760:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.449415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.533714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:06.557098Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.566818Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678431119109519:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.566896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.616398Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.836518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.857024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.070582Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.126401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.126551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.127304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.127349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.163357Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.163507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.178185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.306137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.313047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.485592Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.493685Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.575847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0015eb/r3tmp/yandex9Ohgzr.tmp 2026-01-07T17:50:07.575879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0015eb/r3tmp/yandex9Ohgzr.tmp 2026-01-07T17:50:07.581174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0015eb/r3tmp/yandex9Ohgzr.tmp 2026-01-07T17:50:07.581265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.581650Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.884388Z INFO: TTestServer started on Port 17061 GrpcPort 23346 PQClient connected to localhost:23346 === TenantModeEnabled() = 1 === Init PQ - start server on port 23346 2026-01-07T17:50:08.607895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:08.608080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.608262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T17:50:08.608282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T17:50:08.608453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:08.608499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.612345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.612513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:08.612671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.612711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T17:50:08.612725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2026-01-07T17:50:08.612735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 2026-01-07T17:50:08.614998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.615047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:08.615065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2026-01-07T17:50:08.616753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.616770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:08.616797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.616822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2026-01-07T17:50:08.655467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:50:08.655831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.655850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2026-01-07T17:50:08.655866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-07T17:50:08.658124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2026-01-07T17:50:08.658225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:50:08.660333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808208705, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:08.660455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } 
} Step: 1767808208705 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T17:50:08.660481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.660735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 128 -> 240 2026-01-07T17:50:08.660764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-07T17:50:08.660906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T17:50:08.660956Z node 1 :FLAT_TX_ ... EBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.370046Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.397059Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.397096Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.397112Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.397120Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397131Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397133Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.397144Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.397150Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397162Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.397211Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.397226Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397235Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.397249Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.397257Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.457066Z node 
1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:50:22.470125Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.470158Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.470170Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.470190Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.470201Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.497225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.497254Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497266Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.497287Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497298Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.497368Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.497378Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497388Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.497400Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497409Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.497434Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.497443Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497453Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.497467Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.497474Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.570658Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.570726Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T17:50:22.570757Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.570784Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.570802Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.597669Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.597693Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.597698Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597707Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597709Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.597717Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.597725Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597731Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597736Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.597741Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.597778Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.597794Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597804Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.597815Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.597822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.670821Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.670853Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.670864Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.670881Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.670894Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 
2026-01-07T17:50:22.701272Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.701271Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.701294Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701299Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701304Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.701308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.701323Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.701336Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701346Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-07T17:50:22.701391Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:22.701400Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701407Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:22.701418Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:22.701429Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TResourcePoolTest::ParallelAlterResourcePool >> TResourcePoolTest::ParallelCreateSameResourcePool >> TResourcePoolTest::ParallelCreateResourcePool >> TResourcePoolTest::SchemeErrors >> TResourcePoolTest::DropResourcePoolTwice >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapNamedStatement >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] >> TResourcePoolTest::SchemeErrors 
[GOOD] >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] >> TResourcePoolTest::DropResourcePoolTwice [GOOD] >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.949270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.949374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.949502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.949546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.949577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.949658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.950625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.953853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.108681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.108738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.130091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.130455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.130665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.163126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.163515Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.164197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.164492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.167614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.167812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.169106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.169163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.169280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.169333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.169390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.169562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.305514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307339Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.033943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.034126Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 181us result status StatusSuccess 2026-01-07T17:50:27.034398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 
MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.035030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.035176Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools" took 163us result status StatusSuccess 2026-01-07T17:50:27.037254Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools" PathDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.040326Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.040543Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" took 193us result status StatusSuccess 2026-01-07T17:50:27.040861Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" PathDescription { Self { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.041405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.041605Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 213us result status StatusSuccess 2026-01-07T17:50:27.041905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.950189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.950305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.950369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.950422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.950464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.950493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.950551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.950672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.951631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.952067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.054705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.054763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.072019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.072372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.073421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.104699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.105125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.105870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.106189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.109266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.109463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.111236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.111462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.111539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.112588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.279227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.281983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
01-07T17:50:27.054572Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 265us result status StatusSuccess 2026-01-07T17:50:27.054983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.055528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.055693Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 168us result status StatusSuccess 2026-01-07T17:50:27.056002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-07T17:50:27.056277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-07T17:50:27.056331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-07T17:50:27.056418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-07T17:50:27.056444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-07T17:50:27.056488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-07T17:50:27.056515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-07T17:50:27.057134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-07T17:50:27.057236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.057283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:759:2748] 2026-01-07T17:50:27.057361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-07T17:50:27.057550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.057581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:759:2748] 2026-01-07T17:50:27.057676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-07T17:50:27.057743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.057764Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:759:2748] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-07T17:50:27.058326Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.058562Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 244us result status StatusSuccess 2026-01-07T17:50:27.058900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-07T17:50:27.062399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "NilNoviSubLuna" } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:50:27.062660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 128:0, path# /MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna 2026-01-07T17:50:27.062854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-07T17:50:27.065359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-07T17:50:27.065602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 8163, MsgBus: 1737 2026-01-07T17:49:53.498821Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678375107755022:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:53.498865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:53.545361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:53.872649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:53.872773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:53.924576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:53.932515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:53.933665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:54.115970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:54.119333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T17:49:54.119353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:54.119365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:54.119444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:54.511151Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:54.760318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:54.773808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:54.837784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.015522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.189770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:55.279824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.226984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678392287625864:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.227094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.227781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678392287625874:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.227837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.542741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.603418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.641570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.693053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.753954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.829156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.909628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:57.986252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:58.074441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678396582594047:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.074510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.074810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678396582594052:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.074864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678396582594053:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.074897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.078184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:58.094214Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678396582594056:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:49:58.169650Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678396582594107:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:58.499868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678375107755022:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:58.499956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25435, MsgBus: 25653 2026-01-07T17:50:01.555563Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists. ... e 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:09.696332Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:09.792854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:09.964973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:10.083820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.271930Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678456585441847:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.272024Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.272275Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678456585441856:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.272315Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.338824Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.374626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.408575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.447458Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.484241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.546855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.585229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.642390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:12.740987Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678456585442735:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.741141Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.744570Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678456585442740:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.744685Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678456585442741:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.744879Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:12.749066Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:12.763131Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678456585442744:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:12.832711Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678456585442795:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:13.578035Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678439405570842:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.578115Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:23.682623Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:23.682685Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:25.510156Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7592678512420018348:2675], TxId: 281474976715682, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9ky37gqe732mz32pc61j. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2FmZDJiOWEtYjBlOGNmNDUtOTllYzM2NjItODUwM2M0MDI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1767808214564/18446744073709551615 shard 72075186224037888 with lowWatermark v1767808215005/18446744073709551615 (node# 3 state# Ready) } } 2026-01-07T17:50:25.510904Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592678512420018348:2675], TxId: 281474976715682, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9ky37gqe732mz32pc61j. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2FmZDJiOWEtYjBlOGNmNDUtOTllYzM2NjItODUwM2M0MDI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1767808214564/18446744073709551615 shard 72075186224037888 with lowWatermark v1767808215005/18446744073709551615 (node# 3 state# Ready) } }. 2026-01-07T17:50:25.511504Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592678512420018349:2676], TxId: 281474976715682, task: 2. Ctx: { CheckpointId : . TraceId : 01kecs9ky37gqe732mz32pc61j. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2FmZDJiOWEtYjBlOGNmNDUtOTllYzM2NjItODUwM2M0MDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7592678512420018344:2531], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-07T17:50:25.512074Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=N2FmZDJiOWEtYjBlOGNmNDUtOTllYzM2NjItODUwM2M0MDI=, ActorId: [3:7592678465175377681:2531], ActorState: ExecuteState, LegacyTraceId: 01kecs9ky37gqe732mz32pc61j, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 38 has no snapshot at v1767808214564/18446744073709551615 shard 72075186224037888 with lowWatermark v1767808215005/18446744073709551615 (node# 3 state# Ready)" severity: 1 } } trace_id# |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.953730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.953852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.953905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.953954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.953989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.954031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.954092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.954175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.955036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.955311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.059394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.059453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.074472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.074751Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.074896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.083098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.084848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.087602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.091385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.105818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.106030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.111954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.112036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.112159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.112211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.112271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.112890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.305155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.306958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.307339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-07T17:50:27.093894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:50:27.094024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:50:27.094183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:50:27.094242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:50:27.094892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:50:27.096672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:50:27.096834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:50:27.097973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:27.098019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:50:27.098125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:50:27.098193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:50:27.098245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:27.098286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-07T17:50:27.098322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-07T17:50:27.098342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-07T17:50:27.098587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.098624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:50:27.098706Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:50:27.098734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:27.098762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:50:27.098791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:27.098819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:50:27.098852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:50:27.098883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:50:27.098907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:50:27.098969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:50:27.099010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:50:27.099042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-07T17:50:27.099065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-07T17:50:27.099919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:27.099988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:27.100014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:50:27.100044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-07T17:50:27.100232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:50:27.101100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:27.101154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:50:27.101181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:50:27.101211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-07T17:50:27.101253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:50:27.101313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:50:27.103526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:50:27.103884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:50:27.104030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:50:27.104076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:50:27.104394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:50:27.104465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.104502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:751:2740] TestWaitNotification: OK eventTxId 102 2026-01-07T17:50:27.104885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.105095Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 217us result status StatusSuccess 2026-01-07T17:50:27.106056Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { Properties { key: "concurrent_query_limit" value: "10" } Properties { key: "query_cancel_after_seconds" value: "60" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.950500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.950609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.950680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.950729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.950769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.950804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.950869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.950947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.951876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.952232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.067610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.067671Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.082579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.082886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.083075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.090077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.090426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.091099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.091368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.100707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.101843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.111106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.111371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.111423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.112580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.281527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.282978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.283751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:27.086550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:50:27.086701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:50:27.086768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:50:27.086909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:27.086963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-07T17:50:27.087028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-07T17:50:27.087052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-01-07T17:50:27.087303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.087342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-07T17:50:27.087438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:50:27.087469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:50:27.087507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-07T17:50:27.087554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:50:27.087596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-07T17:50:27.087647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-07T17:50:27.087694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:50:27.087722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:50:27.087819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:50:27.087862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-07T17:50:27.087890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-07T17:50:27.087915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-07T17:50:27.087948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-07T17:50:27.088820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.088903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.088967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:50:27.089016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-07T17:50:27.089065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:50:27.089551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:50:27.089595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:50:27.089661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:50:27.090102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.090169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.090208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:50:27.090246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-07T17:50:27.090274Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:50:27.091015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.091073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-07T17:50:27.091116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-07T17:50:27.091144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-07T17:50:27.091172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:50:27.091232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-07T17:50:27.095601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:50:27.095782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:50:27.096554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:50:27.097005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-07T17:50:27.097349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:50:27.097395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:50:27.097812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:50:27.097932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.097970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:781:2770] TestWaitNotification: OK eventTxId 103 2026-01-07T17:50:27.098531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.098776Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 287us result status StatusPathDoesNotExist 2026-01-07T17:50:27.098979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.948944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.949036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.949149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.949184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.949231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.949285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.950274Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.952039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.107140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.107193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.124807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.125226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.125416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.133861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.134282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.135078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.135383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.138186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.138408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.139582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.139644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.139768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.139820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.139874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.140050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.291618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.292647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.292800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.292913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.293839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:50:27.048577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-07T17:50:27.048668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-07T17:50:27.048704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 124 2026-01-07T17:50:27.048732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 124, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-07T17:50:27.048763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:50:27.048841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 124, subscribers: 0 2026-01-07T17:50:27.051421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-07T17:50:27.051506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-07T17:50:27.053238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-07T17:50:27.053337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 TestModificationResult got TxId: 124, wait until txId: 124 TestWaitNotification wait txId: 124 2026-01-07T17:50:27.053567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 124: send EvNotifyTxCompletion 2026-01-07T17:50:27.053630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 124 2026-01-07T17:50:27.053978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-07T17:50:27.054104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.054159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:722:2711] TestWaitNotification: OK eventTxId 124 TestModificationResults wait txId: 125 2026-01-07T17:50:27.057319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: 
"AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:50:27.057585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/AnotherDir, operationId: 125:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.057788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:50:27.057857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 0 2026-01-07T17:50:27.057901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 125:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 41] source path: 2026-01-07T17:50:27.057976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:50:27.058102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 125:1, path# /MyRoot/AnotherDir/MyResourcePool 2026-01-07T17:50:27.058228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-07T17:50:27.060773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 125:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 2026-01-07T17:50:27.061018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 125:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.063445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 125, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.063697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2026-01-07T17:50:27.066797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:50:27.067145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/.metadata/workload_manager/pools/AnotherDir, operationId: 126:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.067346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 40], parent name: pools, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-07T17:50:27.067406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 0 2026-01-07T17:50:27.067448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 126:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 42] source path: 2026-01-07T17:50:27.067541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:50:27.067615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 126:1, path# /MyRoot/.metadata/workload_manager/pools/AnotherDir/MyResourcePool 2026-01-07T17:50:27.067745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-07T17:50:27.070357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 126:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 2026-01-07T17:50:27.070494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 126:0, at schemeshard: 72057594046678944 2026-01-07T17:50:27.072936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.073190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-07T17:50:27.076158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "" } } TxId: 127 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:50:27.076337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 127:0, path# /MyRoot/.metadata/workload_manager/pools/ 2026-01-07T17:50:27.076500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-07T17:50:27.078844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/\', error: path part shouldn\'t be empty" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:50:27.079083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, operation: CREATE RESOURCE POOL, path: TestModificationResult got TxId: 127, wait until txId: 127 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:50:25.948842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:50:25.948976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:50:25.949076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:50:25.949113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:50:25.949137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:50:25.949211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:50:25.949281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:50:25.950193Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:50:25.953763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:50:26.055577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:50:26.055645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:26.076507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:50:26.076682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:50:26.076903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:50:26.092531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:50:26.092948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:50:26.093776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:50:26.094106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:50:26.108732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.109023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:50:26.111039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:50:26.111243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:50:26.111293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:50:26.111405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:50:26.112566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:50:26.335645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.336654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.336794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.336877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:50:26.337843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.214673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.214732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-07T17:50:27.214850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.214872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.214925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.215113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.215307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.215526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.215556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.215628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.215672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-07T17:50:27.215780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.215808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.215957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.215983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got 
EvNotifyTxCompletionResult 2026-01-07T17:50:27.216091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-07T17:50:27.216269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-07T17:50:27.216317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.216340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.216502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-07T17:50:27.216670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.216705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.216829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.216884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.216958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-07T17:50:27.217102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-01-07T17:50:27.217174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.217198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.217359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.217385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.217473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.217513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 129: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.217652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.217681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.217781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.217807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:834:2823] 2026-01-07T17:50:27.218082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-01-07T17:50:27.218105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:834:2823] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-01-07T17:50:27.221062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:50:27.221306Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 289us result status StatusSuccess 2026-01-07T17:50:27.221812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 Properties { Properties { key: "concurrent_query_limit" value: "20" } Properties { key: "query_cancel_after_seconds" value: "60" } Properties { key: "query_count_limit" value: "50" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 12330, MsgBus: 8233 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:29.557372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:29.666257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:29.684508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:29.684998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:29.685063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:29.924227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:29.924366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:29.998979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808166852636 != 1767808166852640 2026-01-07T17:49:30.013741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:30.059484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:30.179501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:30.586392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:30.586482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:30.586536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:30.587080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:30.601595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:30.959398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:31.056559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.339617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.705364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.028567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:32.857918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.858104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.858941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1979:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.859050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.891113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.047931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.313809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.622008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.871225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.179060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.513715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.877420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:35.246574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.246768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.247164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.247227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.247358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.253179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:35.437399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:49:35.501832Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:37.325562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table. ... 66639Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:14.566728Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:14.844822Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:14.845076Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:14.878223Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767808210651153 != 1767808210651157 2026-01-07T17:50:14.882586Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.934236Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:15.049917Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:15.496374Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:15.496443Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:15.496484Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:15.497234Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.509681Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:16.013122Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:16.083688Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:16.340297Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:16.741806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.055501Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.626982Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1906:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.627548Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.628388Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1979:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.628543Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.660958Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.880978Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.163875Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.415048Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.667360Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:18.974088Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:19.273418Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:19.607417Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:20.034936Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2795:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:20.035095Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:20.035569Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2799:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:20.035648Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:20.035733Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2802:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:20.045749Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:20.205950Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2804:4186], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:20.266567Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2859:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:22.623116Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:22.887377Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:23.264888Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:50:23.634435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:23.756413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:23.777759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:23.778329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:23.778402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:24.075385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:24.075508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:24.189878Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808220730097 != 1767808220730101 2026-01-07T17:50:24.207040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:24.257599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:24.383752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:24.773088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:24.789546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:24.938834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.303663Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-07T17:50:25.303794Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-07T17:50:25.314049Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-07T17:50:25.314153Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314232Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314268Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314311Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314349Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.334037Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=NzYzOGQ4NWYtNjQ4NDk4NzgtYWJkOTBjNzMtYmM5YzQwNzU= 2026-01-07T17:50:25.334162Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=NzczM2YyZWItNDQyNTRiN2ItMjc4N2M0MGYtNjY5ZWRiNw== 2026-01-07T17:50:25.337795Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=ZTUwYzA5M2MtOWQ1ZTE2Zi1mNmRlODIzMS05NjNiNGZmNw== 2026-01-07T17:50:25.337856Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=ZDY4NjJjNmQtZmM5MGQwODMtMjdhN2Q0NDktZTRlODRlZDY= 2026-01-07T17:50:25.339316Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=ZjI5YmIxNjAtYjU5ZWZkMWQtNTE1NTJmY2MtOTc4ZmI4Y2U= 2026-01-07T17:50:25.367942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.371006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.371240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.382842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:25.426244Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.427984Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1013:2865] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.428557Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1017:2869] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.429212Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.549015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.549138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.549226Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.549292Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.549353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.588690Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.011173Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1767808227.011131s, errors=0 2026-01-07T17:50:27.011425Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767808227011 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.025340Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1167:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.094950Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1767808227.094908s, errors=0 2026-01-07T17:50:27.095230Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767808227094 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.109504Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1214:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.175570Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1767808227.175535s, errors=0 2026-01-07T17:50:27.175710Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767808227175 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.188759Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1261:3009] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.254451Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767808227.254414s, errors=0 2026-01-07T17:50:27.254726Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767808227254 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.268133Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1308:3031] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.342740Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 3} finished in 1767808227.342697s, errors=0 2026-01-07T17:50:27.343037Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767808227342 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.343101Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 2.029279s, oks# 20, errors# 0 2026-01-07T17:50:27.343275Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1736, MsgBus: 15851 2026-01-07T17:49:32.289497Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678285079735363:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:32.289553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:32.400085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:32.724250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:32.724363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:32.799610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:32.808847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:32.819805Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:33.067134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:33.067156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:33.067165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:33.067276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:33.069960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:33.350269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:33.622231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:33.627943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:35.623202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678297964638071:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.623333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.624758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678297964638108:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.625227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678297964638110:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.625310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.626172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678297964638117:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.626236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:35.628437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:35.641603Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678297964638111:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:35.793680Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678297964638165:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:36.097703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:36.185756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.063861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.740216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678285079735363:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:37.820655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27254, MsgBus: 4447 2026-01-07T17:49:39.919215Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678316540394915:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:39.935969Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:39.936427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:40.084398Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678316540394756:2081] 1767808179850467 != 1767808179850470 2026-01-07T17:49:40.094255Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:40.096919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:40.098055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:40.098121Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:49:40.123600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:40.311281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:40.311302Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:40.311309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:40.311390Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:40.339290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:40.914529Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:40.994163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:41.001687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:43.545160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678333720264810:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:43.545296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:43.545687Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678333720264835:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: ... ncel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.221972Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222022Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222220Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222266Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222446Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222468Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222639Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222663Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222853Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.222857Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223027Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223033Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223217Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223227Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223399Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223742Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223976Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.223998Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.224184Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.224383Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-07T17:50:14.493565Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:14.493595Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 31635, MsgBus: 3379 2026-01-07T17:50:16.545751Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678475194985500:2244];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:16.545819Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:16.562737Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:16.661472Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678475194985282:2081] 1767808216523309 != 1767808216523312 2026-01-07T17:50:16.671239Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:16.672333Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:16.704691Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:16.704779Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:16.707798Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:16.765483Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:16.765508Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:16.765516Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:16.765598Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:16.921044Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:17.368177Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:17.544572Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:21.198324Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678496669822637:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:21.198448Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:21.201289Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678496669822660:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:21.201394Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678496669822659:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:21.201467Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:21.209964Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:21.222928Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678496669822663:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:21.300100Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678496669822714:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:21.372712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:21.465849Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:21.545005Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678475194985500:2244];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:21.545079Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:22.912550Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.225759Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592678513849700001:2968], SessionActorId: [4:7592678509554732634:2968], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[4:7592678509554732634:2968]. 2026-01-07T17:50:25.225954Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=OWMwYjU2MzUtN2U0YzFkY2UtNjhlMTczNDYtZjc3NjNkMzg=, ActorId: [4:7592678509554732634:2968], ActorState: ExecuteState, LegacyTraceId: 01kecs9ky8emhkdxjfsgpcmz91, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678513849700019:2968] from: [4:7592678513849700001:2968] trace_id# 2026-01-07T17:50:25.226036Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678513849700019:2968] TxId: 281474976710668. Ctx: { TraceId: 01kecs9ky8emhkdxjfsgpcmz91, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OWMwYjU2MzUtN2U0YzFkY2UtNjhlMTczNDYtZjc3NjNkMzg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-07T17:50:25.226254Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=OWMwYjU2MzUtN2U0YzFkY2UtNjhlMTczNDYtZjc3NjNkMzg=, ActorId: [4:7592678509554732634:2968], ActorState: ExecuteState, LegacyTraceId: 01kecs9ky8emhkdxjfsgpcmz91, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:50:23.442448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:23.584678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:23.624392Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:23.625323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:23.625438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:24.078605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:24.078746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:24.247311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808220808658 != 1767808220808662 2026-01-07T17:50:24.257763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:24.307763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:24.422744Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:24.773205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:24.789578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:24.942529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.293971Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-07T17:50:25.294135Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-07T17:50:25.313887Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-07T17:50:25.314015Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314093Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314130Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314168Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.314194Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-07T17:50:25.335385Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=ZmRhYzQ2ZWQtMjQ3ODE2OTktZDJkMGFhYS1lYTczYTljNg== 2026-01-07T17:50:25.335472Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=ZjgyMGNkYmMtOTkzMzczNjYtODk5ODBkYTMtNjkwNDU0NDM= 2026-01-07T17:50:25.340949Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=MzY2ZDhiMDQtNmU1ZjRmZWItNzFhMTE3ZmQtYmQ5ZmYxNTA= 2026-01-07T17:50:25.341007Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=YzViOWI4MDMtNGEzYTE2N2YtMzllYmY3ZWItZjFlZGQwMTQ= 2026-01-07T17:50:25.342654Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=ZTVlNmQ1OWYtNDZjMTY2MzUtY2E2ZDE3NTItNDU1MjFmZg== 2026-01-07T17:50:25.367905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.368375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.371064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.371331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:25.380004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:25.442538Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.449361Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1013:2865] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.449970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1017:2869] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.450586Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:25.587194Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.587302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.587382Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.587430Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.587472Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:25.623798Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.016669Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1767808227.016623s, errors=0 2026-01-07T17:50:27.017000Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767808227016 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.031374Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1167:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.098268Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767808227.098241s, errors=0 2026-01-07T17:50:27.098451Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767808227098 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.112052Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1214:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.174683Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1767808227.174647s, errors=0 2026-01-07T17:50:27.174918Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767808227174 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.188687Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1261:3009] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.254015Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1767808227.253952s, errors=0 2026-01-07T17:50:27.254310Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767808227253 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.267678Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1308:3031] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:27.332945Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 1} finished in 1767808227.332884s, errors=0 2026-01-07T17:50:27.333245Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767808227332 OperationsOK: 4 OperationsError: 0 } 2026-01-07T17:50:27.333319Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 2.019678s, oks# 20, errors# 0 2026-01-07T17:50:27.333479Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 16138, MsgBus: 9576 2026-01-07T17:49:56.113374Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678390464203007:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:56.113437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:56.215316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:56.608019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:56.608139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:56.618255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:56.878725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:56.901073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678390464202797:2081] 1767808196080227 != 1767808196080230 2026-01-07T17:49:56.905170Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:56.956714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:56.956736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:56.956748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:56.956876Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:57.097976Z 
node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:57.171192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:57.569953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:59.560844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678403349105574:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:59.560948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:59.561244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678403349105588:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:59.561255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678403349105582:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:59.561284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:59.565220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:59.574951Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678403349105590:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:59.670636Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678403349105641:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:59.976112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:00.118463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:01.087491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:01.108545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678390464203007:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:01.108601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:02.629546Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715670; 2026-01-07T17:50:02.650361Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7592678416234015698:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:7592678416234015390:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:7592678416234015698:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:50:02.650901Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7592678416234015390:2967]. 2026-01-07T17:50:02.651066Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651084Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651093Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651108Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651124Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651134Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651144Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651154Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651164Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651174Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651183Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651193Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651203Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651213Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651223Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651232Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651240Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: 
[1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651250Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7592678416234015692:2967], SessionActorId: [1:7592678416234015390:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:02.651258Z node 1 :KQP_COMPUTE ... ion/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:19.925008Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:20.014681Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:20.015985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:20.016064Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:20.036294Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:20.141569Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:20.141597Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:20.141607Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:20.141689Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:20.172572Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:20.666556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:20.681365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:20.861574Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:23.905134Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678503662732015:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:23.905200Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678503662732026:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:23.905251Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:23.906026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678503662732030:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:23.906073Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:23.914938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:23.927483Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678503662732029:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:23.986747Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678503662732082:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:24.048079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:24.137392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.122285Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678486482861995:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:25.122431Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:25.451310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:27.634086Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710668; 2026-01-07T17:50:27.667738Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7592678520842609939:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [3:7592678516547642025:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[3:7592678520842609939:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:50:27.667852Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7592678516547642025:2967]. 2026-01-07T17:50:27.668098Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668119Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668133Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668147Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668160Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668174Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668187Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668202Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668215Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668227Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668238Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668249Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668261Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668273Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7592678520842609933:2967], SessionActorId: [3:7592678516547642025:2967], StateRollback: unknown message 278003713 2026-01-07T17:50:27.668353Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YjBmZmI0YjUtYTI4YWZmNDgtNWQ3MTUxN2EtMTEzN2MwOGM=, ActorId: [3:7592678516547642025:2967], ActorState: ExecuteState, LegacyTraceId: 01kecs9p8raa239x9e2c732e4x, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678520842609934:2967] from: [3:7592678520842609933:2967] trace_id# 2026-01-07T17:50:27.668468Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678520842609934:2967] TxId: 
281474976710668. Ctx: { TraceId: 01kecs9p8raa239x9e2c732e4x, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YjBmZmI0YjUtYTI4YWZmNDgtNWQ3MTUxN2EtMTEzN2MwOGM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:50:27.668767Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjBmZmI0YjUtYTI4YWZmNDgtNWQ3MTUxN2EtMTEzN2MwOGM=, ActorId: [3:7592678516547642025:2967], ActorState: ExecuteState, LegacyTraceId: 01kecs9p8raa239x9e2c732e4x, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TKqpScanFetcher::ScanDelayedRetry >> TKqpScheduler::LeftFairShareIsDistributed >> TKqpScheduler::QueriesWithFairShareOverlimit+AllowOverlimit [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] >> TKqpScheduler::ZeroLimits [GOOD] >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> TKqpScheduler::WeightedPools [GOOD] >> TKqpScheduler::WeightedQueries [GOOD] >> TKqpScheduler::ZeroQueries [GOOD] >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> TKqpScanData::UnboxedValueSize >> TKqpScheduler::SingleDatabasePoolQueryStructure [GOOD] >> TKqpScheduler::WeightedDatabase [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScheduler::LeftFairShareIsDistributed [GOOD] >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:50:23.603024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:23.718357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:23.740573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:23.741168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:23.741241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:24.075386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:24.079458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:24.198156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808220729375 != 1767808220729379 2026-01-07T17:50:24.211071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:24.262149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:24.385895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:24.778279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:24.792395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:24.938653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.336776Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2026-01-07T17:50:25.338664Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-07T17:50:25.380358Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.041297s, errors=0 2026-01-07T17:50:25.380818Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 
Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-07T17:50:25.380921Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:961:2819] with id# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-07T17:50:25.386328Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-07T17:50:25.386467Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:964:2822] 2026-01-07T17:50:25.386581Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-07T17:50:25.386634Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-07T17:50:25.386919Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:25.393777Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.006791s, read# 1000 2026-01-07T17:50:25.394269Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 } 2026-01-07T17:50:25.394416Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:967:2825] 2026-01-07T17:50:25.394477Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-07T17:50:25.394509Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-07T17:50:25.394822Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:25.639653Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} finished in 0.244770s, read# 1000 2026-01-07T17:50:25.639810Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 244 OperationsOK: 1000 OperationsError: 0 } 2026-01-07T17:50:25.639920Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:970:2828] 2026-01-07T17:50:25.639964Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-07T17:50:25.639995Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-07T17:50:25.640254Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:25.742625Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} finished in 0.102314s, read# 1000 2026-01-07T17:50:25.742796Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:970:2828] with chunkSize# 10 finished: 
0 { DurationMs: 102 OperationsOK: 1000 OperationsError: 0 } 2026-01-07T17:50:25.742989Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:973:2831] 2026-01-07T17:50:25.743052Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called, sample# 1000 2026-01-07T17:50:25.743097Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-07T17:50:25.743390Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:25.746247Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} finished in 0.002305s, sampled# 1000, iter finished# 1, oks# 1000 2026-01-07T17:50:25.746359Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 1000 2026-01-07T17:50:25.746538Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} started read actor with id# [1:976:2834] 2026-01-07T17:50:25.746596Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called, will read keys# 1000 2026-01-07T17:50:26.233863Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-07T17:50:26.234093Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 487 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 51\n" } 2026-01-07T17:50:26.234264Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} finished in 0.849054s with report: { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 244 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 102 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 487 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 51\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-07T17:50:26.234675Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:961:2819] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-07T17:50:29.921211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:29.926440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:29.930542Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:29.930857Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:29.931040Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:30.158565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:30.158665Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:30.185847Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767808226844438 != 1767808226844442 2026-01-07T17:50:30.189851Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:30.240951Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:30.313409Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:30.602044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:30.615416Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:30.724017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:30.977365Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2026-01-07T17:50:30.977754Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-07T17:50:30.991717Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor finished in 0.013611s, errors=0 2026-01-07T17:50:30.992347Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 
Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-07T17:50:30.992487Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:961:2819] with id# {Tag: 0, parent: [2:952:2810], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-07T17:50:30.993812Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-07T17:50:30.993958Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:964:2822] 2026-01-07T17:50:30.994093Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-07T17:50:30.994192Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-07T17:50:30.994459Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:30.995162Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} finished in 0.000662s, read# 10 2026-01-07T17:50:30.995300Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-07T17:50:30.995419Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:967:2825] 2026-01-07T17:50:30.995469Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-07T17:50:30.995499Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-07T17:50:30.995722Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:30.999130Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 0.003363s, read# 10 2026-01-07T17:50:30.999272Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 3 OperationsOK: 10 OperationsError: 0 } 2026-01-07T17:50:30.999380Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:970:2828] 2026-01-07T17:50:30.999451Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-07T17:50:30.999503Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-07T17:50:30.999763Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:31.000293Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 0.000495s, read# 10 2026-01-07T17:50:31.000399Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:970:2828] with 
chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-07T17:50:31.000517Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:973:2831] 2026-01-07T17:50:31.000570Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Bootstrap called, sample# 10 2026-01-07T17:50:31.000599Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-07T17:50:31.000794Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:31.001294Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 0.000440s, sampled# 10, iter finished# 1, oks# 10 2026-01-07T17:50:31.001387Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received keyCount# 10 2026-01-07T17:50:31.001544Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} started read actor with id# [2:976:2834] 2026-01-07T17:50:31.001607Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:961:2819], subTag: 5} Bootstrap called, will read keys# 10 2026-01-07T17:50:31.429514Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-07T17:50:31.429755Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 428 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 2\n99.9%: 30\n" } 2026-01-07T17:50:31.429936Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 0.437276s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 3 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 428 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 2\n99.9%: 30\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-07T17:50:31.430065Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TKqpScheduler::DemandIsCutOffByLimit [GOOD] >> TKqpScheduler::AddUpdateQueries [GOOD] >> TKqpScheduler::DeleteQueries [GOOD] >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> TKqpScheduler::AddUpdatePools [GOOD] >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 19459, MsgBus: 5581 2026-01-07T17:49:29.534664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678272417289717:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:29.534766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:29.777528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:29.777654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:29.784887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:29.802684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:29.820976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678272417289683:2081] 1767808169532888 != 1767808169532891 2026-01-07T17:49:29.823206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:29.871593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:29.871634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:29.871643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:29.871771Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:30.063572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:30.266561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:30.294532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:30.542712Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:32.595450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678285302192460:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.595533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.595862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678285302192473:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.595902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678285302192472:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.595933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:32.599097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:32.608264Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678285302192476:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:32.736438Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678285302192527:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:33.044005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:33.161111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.195473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.985449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678272417289717:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:35.082888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:36.132281Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678302482069737:2962], SessionActorId: [1:7592678298187102397:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7592678298187102397:2962]. 2026-01-07T17:49:36.132546Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MjAyZTkyZjEtNjNiNzg5NTctOGE0YmM2ZDMtYmRhNzljOTM=, ActorId: [1:7592678298187102397:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs83zm9mj0qas1tbkr5z2r, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7592678302482069738:2962] from: [1:7592678302482069737:2962] trace_id# 2026-01-07T17:49:36.132655Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678302482069738:2962] TxId: 281474976715665. Ctx: { TraceId: 01kecs83zm9mj0qas1tbkr5z2r, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyZTkyZjEtNjNiNzg5NTctOGE0YmM2ZDMtYmRhNzljOTM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-07T17:49:36.132890Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MjAyZTkyZjEtNjNiNzg5NTctOGE0YmM2ZDMtYmRhNzljOTM=, ActorId: [1:7592678298187102397:2962], ActorState: ExecuteState, LegacyTraceId: 01kecs83zm9mj0qas1tbkr5z2r, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 22377, MsgBus: 3424 2026-01-07T17:49:37.503574Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678307288520196:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:37.503631Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:37.620735Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:37.644625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:37.644708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:37.659482Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678307288520167:2081] 1767808177501302 != 1767808177501305 2026-01-07T17:49:37.671803Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:37.680084Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:37.753827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:37.753848Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:37.753856Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:37.753931Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:37.811411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:38.218145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:38.224174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:38.513121Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::T ... 
RN: log.cpp:841: tablet_id=72075186224038018;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.008072Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.008148Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.008170Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.011017Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.011088Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.011110Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.027068Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.027153Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.027179Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.031656Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.031724Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.031746Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.038973Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.039045Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.039068Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.042604Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.042673Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.042694Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.050120Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.050192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.050220Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:25.173330Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:25.173364Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:25.179006Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs9604bt7t5e9txbv0gmge", SessionId: ydb://session/3?node_id=4&id=MTdlMWQyNzAtY2FjZmRjNjUtZmIwZTM5ODAtMTk2ODA1OTA=, Slow query, duration: 10.741438s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:26.670246Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;commit_tx_id=281474976710667;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-07T17:50:26.822535Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[4:7592678467040408578:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976710669; 2026-01-07T17:50:26.825105Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7592678518580025341:3646], SessionActorId: [4:7592678514285057832:3646], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7592678518580025341:3646].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-07T17:50:26.825214Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592678518580025341:3646], SessionActorId: [4:7592678514285057832:3646], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7592678514285057832:3646]. 2026-01-07T17:50:26.825415Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ZjQ5OTZjNGItNjc1ZGNjNmItOGJlZjIxMGUtNDE1Nzg0MWU=, ActorId: [4:7592678514285057832:3646], ActorState: ExecuteState, LegacyTraceId: 01kecs9nc14aaagvsjnn1y1sy5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678518580025358:3646] from: [4:7592678518580025341:3646] trace_id# 2026-01-07T17:50:26.825514Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678518580025358:3646] TxId: 281474976710669. Ctx: { TraceId: 01kecs9nc14aaagvsjnn1y1sy5, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZjQ5OTZjNGItNjc1ZGNjNmItOGJlZjIxMGUtNDE1Nzg0MWU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-07T17:50:26.825811Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ZjQ5OTZjNGItNjc1ZGNjNmItOGJlZjIxMGUtNDE1Nzg0MWU=, ActorId: [4:7592678514285057832:3646], ActorState: ExecuteState, LegacyTraceId: 01kecs9nc14aaagvsjnn1y1sy5, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:50:26.831247Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.831420Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.831548Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.831874Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.832031Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.832167Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.832302Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.832441Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.832577Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-07T17:50:26.835598Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedDatabase [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedQueries [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DeleteQueries [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:50 :META:Reads { ShardId: 1001001 KeyRanges { } } 2026-01-07T17:50:33.112394Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-07T17:50:33.113307Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-07T17:50:33.124014Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-07T17:50:33.124133Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:50:21.644430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:21.759036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:21.795376Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:21.796028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:21.796091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:22.301499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:22.301629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:22.393712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808218607673 != 1767808218607677 2026-01-07T17:50:22.411835Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:22.461584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:22.603659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:22.938818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:50:22.939691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:22.941326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:22.942194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:22.943887Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:50:22.943949Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-07T17:50:22.955097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:22.982102Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-07T17:50:22.982223Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-07T17:50:23.055250Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-07T17:50:23.055371Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-07T17:50:23.055585Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-07T17:50:23.055956Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-07T17:50:23.056008Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-07T17:50:23.056448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:23.057479Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-07T17:50:23.057573Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-07T17:50:23.057629Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-07T17:50:23.057667Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-07T17:50:23.058299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:50:23.058359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-07T17:50:23.059196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-07T17:50:23.073378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:23.077023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:23.077113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:23.078046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-07T17:50:23.083865Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-07T17:50:23.104103Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:50:23.104199Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-07T17:50:23.104443Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-07T17:50:23.104506Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-07T17:50:23.104594Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-07T17:50:23.104771Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-07T17:50:23.105583Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-07T17:50:23.105778Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-07T17:50:23.106587Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-07T17:50:23.106989Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-07T17:50:23.107093Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136875826074400}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-07T17:50:23.107173Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136875826074400}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-07T17:50:23.107322Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136875826074400}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-07T17:50:23.107415Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-07T17:50:23.107471Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-07T17:50:23.107516Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-07T17:50:23.107737Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-07T17:50:23.107800Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-07T17:50:23.107853Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-07T17:50:23.109124Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-07T17:50:23.109281Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-07T17:50:23.109417Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-07T17:50:23.109591Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:32.115766Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2026-01-07T17:50:32.119362Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-07T17:50:32.119505Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2026-01-07T17:50:32.119586Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2026-01-07T17:50:32.120269Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:50:32.120483Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:50:32.131468Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:333:2371] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:400:2399] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-07T17:50:32.132129Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:50:32.132832Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:50:32.132991Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:50:32.135658Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-07T17:50:32.135752Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-07T17:50:32.136342Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-07T17:50:32.136512Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:50:32.136695Z node 2 :HIVE DEBUG: hive_impl.cpp:508: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-07T17:50:32.136746Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:50:32.137121Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-07T17:50:32.148225Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:50:32.149815Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:50:32.149889Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 4500 Status# 16 SEND to# [2:400:2399] Proxy marker# C1 2026-01-07T17:50:32.160612Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-07T17:50:32.253666Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2026-01-07T17:50:32.253789Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2026-01-07T17:50:32.253838Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2026-01-07T17:50:32.254082Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 5000 in 0.500000s at 4.950000s 2026-01-07T17:50:32.254515Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 4500, txid# 281474976715665 marker# C2 2026-01-07T17:50:32.254587Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 4500 Status# 17 SEND EvProposeTransactionStatus to# [2:400:2399] Proxy 2026-01-07T17:50:32.255209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 4500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:32.255732Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 4500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 4500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-07T17:50:32.255772Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:50:32.255898Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:32.255950Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:50:32.256072Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4500:281474976715665] in PlanQueue unit at 72075186224037889 2026-01-07T17:50:32.256271Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 4500:281474976715665 keys extracted: 0 2026-01-07T17:50:32.256395Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:50:32.256560Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:32.256619Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-07T17:50:32.259165Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:50:32.260701Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 4500} 2026-01-07T17:50:32.260783Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:32.261734Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-07T17:50:32.261831Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-07T17:50:32.261883Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-07T17:50:32.261915Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2026-01-07T17:50:32.261957Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 acknowledged 2026-01-07T17:50:32.262298Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:32.262388Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:50:32.262451Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2026-01-07T17:50:32.262530Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:50:32.270709Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2026-01-07T17:50:32.273781Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2026-01-07T17:50:32.273881Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:50:32.274670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-07T17:50:32.274778Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2026-01-07T17:50:32.275577Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2026-01-07T17:50:32.276423Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:50:32.299890Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:50:32.300156Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:50:32.301580Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:32.302248Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:50:32.302574Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-07T17:50:32.302616Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-07T17:50:32.302692Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-07T17:50:32.302802Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-07T17:50:32.302880Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest |94.5%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> THiveTest::TestCreateTablet >> TCutHistoryRestrictions::BasicTest >> THiveImplTest::BootQueueSpeed >> THiveTest::TestHiveBalancer >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TScaleRecommenderTest::BasicTest >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestDrain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 7485, MsgBus: 26134 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:32.160196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:32.301942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:32.318502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:32.318950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:32.319004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:32.578321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:32.578423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:32.644480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808169584595 != 1767808169584599 2026-01-07T17:49:32.655372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:32.705963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:32.843296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:33.278975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:33.279025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:33.279053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:33.279503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:33.291560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:33.671862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:33.723411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.723514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.723870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.724692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.725177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:33.729339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:33.838414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:926:2785], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:33.933329Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:34.243639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:34.515104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:37.050901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:40.238998Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=4; 2026-01-07T17:49:40.247717Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-07T17:49:40.259129Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-07T17:49:40.259535Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:8781:8416], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:8743:8416]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:8781:8416].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2026-01-07T17:49:40.260085Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:8772:8416], SessionActorId: [1:8743:8416], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[1:8743:8416]. 2026-01-07T17:49:40.260565Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=YzRjMTg0MjMtNDVhZGZlZjgtZDcxZWJiMmQtODE5OGI0Mzc=, ActorId: [1:8743:8416], ActorState: ExecuteState, LegacyTraceId: 01kecs87swa09gqb821hyq93ad, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:8773:8416] from: [1:8772:8416] trace_id# 2026-01-07T17:49:40.260784Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:8773:8416] TxId: 281474976715664. Ctx: { TraceId: 01kecs87swa09gqb821hyq93ad, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzRjMTg0MjMtNDVhZGZlZjgtZDcxZWJiMmQtODE5OGI0Mzc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-07T17:49:40.261101Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzRjMTg0MjMtNDVhZGZlZjgtZDcxZWJiMmQtODE5OGI0Mzc=, ActorId: [1:8743:8416], ActorState: ExecuteState, LegacyTraceId: 01kecs87swa09gqb821hyq93ad, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 23300, MsgBus: 28819 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:44.815209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:44.822311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:44.827525Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:44.827870Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:44.828039Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:45.084067Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:45.084198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:45.117901Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767808181388904 != 1767808181388908 2026-01-07T17:49:45.122262Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:45.169912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:45.335765Z node 2 :KQP_PROXY WARN: kqp_f ... ne=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.335871Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.348476Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.348581Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.348627Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.360097Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.360211Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.360255Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.369856Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.369970Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.370031Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.380260Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.380370Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.380414Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.389070Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.389172Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.389207Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.397432Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.397543Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.397589Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.407105Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.407208Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.407275Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-07T17:50:30.414998Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.415110Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.415153Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.424206Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.424343Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.424389Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.539463Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs97519smeync4hxmaycyx", SessionId: ydb://session/3?node_id=3&id=M2YwYmJlNzktN2IxNGRmNTAtNzI3ZGYyNmYtN2NmZTFiOGU=, Slow query, duration: 18.405345s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:32.363261Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:80;broken_lock_id=281474976715665; 2026-01-07T17:50:32.481046Z node 3 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[3:2001:3476];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037919;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715668; 2026-01-07T17:50:32.481883Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [3:9232:8024], SessionActorId: [3:8666:8024], Got LOCKS BROKEN for table. ShardID=72075186224037919, Sink=[3:9232:8024].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-07T17:50:32.482076Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:9232:8024], SessionActorId: [3:8666:8024], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[3:8666:8024]. 2026-01-07T17:50:32.482403Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YjgxNDA0MDItYTg3NmMzNWItZmY1NjRlOTAtNzdmZGYwMjI=, ActorId: [3:8666:8024], ActorState: ExecuteState, LegacyTraceId: 01kecs9szy1xamycav8px2rzq2, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:9800:8024] from: [3:9232:8024] trace_id# 2026-01-07T17:50:32.483195Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:9800:8024] TxId: 281474976715668. Ctx: { TraceId: 01kecs9szy1xamycav8px2rzq2, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YjgxNDA0MDItYTg3NmMzNWItZmY1NjRlOTAtNzdmZGYwMjI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-07T17:50:32.483766Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjgxNDA0MDItYTg3NmMzNWItZmY1NjRlOTAtNzdmZGYwMjI=, ActorId: [3:8666:8024], ActorState: ExecuteState, LegacyTraceId: 01kecs9szy1xamycav8px2rzq2, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:50:32.519950Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715668; |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> THiveTest::TestServerlessMigration >> THiveTest::TestLocalDisconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2026-01-07T17:50:06.470201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678432018984697:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.470241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.614291Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.615436Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.840238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.840432Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:06.846445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.001938Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.069128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.069249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.078008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.078079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.109751Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 
2 2026-01-07T17:50:07.110011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.111537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.217030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.235501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.472026Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.476228Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.574958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/001770/r3tmp/yandexp5kQzi.tmp 2026-01-07T17:50:07.574997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/001770/r3tmp/yandexp5kQzi.tmp 2026-01-07T17:50:07.581422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/001770/r3tmp/yandexp5kQzi.tmp 2026-01-07T17:50:07.581514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.586920Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.895190Z INFO: TTestServer started on Port 13907 GrpcPort 27026 PQClient connected to localhost:27026 2026-01-07T17:50:08.263363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.474244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:10.963932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678449198855106:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.964100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.964224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678449198855119:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.964550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678449198855121:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.964606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.968609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:10.993902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678449198855122:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:50:11.051268Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678453493822506:3051] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:11.541142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678432018984697:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:11.541221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:11.582378Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592678453493822523:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:50:11.589617Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmMzZWIxMjQtN2RhYzgzOTMtMWViZjZiNjctYjE2ZjVmMTA=, ActorId: [1:7592678449198855081:2331], ActorState: ExecuteState, LegacyTraceId: 01kecs960a8sdgfkqmzyamxwb7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:50:11.594110Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:50:11.676677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:11.766438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:11.911173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592678457788790285:3402] === CheckClustersList. Ok 2026-01-07T17:50:18.377450Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7592678432018984756:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ P ... 
: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.833233Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-07T17:50:34.833259Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:34.833266Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833274Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.833289Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833301Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-07T17:50:34.833322Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.833331Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.833350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833361Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.833387Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.833395Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833402Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.833411Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.833417Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.855657Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.855695Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855719Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.855740Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855753Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.855811Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:34.855819Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855827Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.855838Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855846Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-07T17:50:34.855868Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:34.855879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.855900Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.855910Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-07T17:50:34.932847Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:34.932877Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.932891Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.932909Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.932939Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-07T17:50:34.937693Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:34.937724Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937739Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.937755Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937765Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-07T17:50:34.937823Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:34.937832Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937838Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.937846Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-07T17:50:34.937879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:34.937888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937895Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.937903Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937909Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-07T17:50:34.937927Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.937935Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937942Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.937949Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937954Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.937974Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.937982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.937987Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.937996Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.938001Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.955908Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:34.955934Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.955948Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.955963Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.955974Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-07T17:50:34.956032Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:34.956044Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.956050Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.956060Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.956068Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-07T17:50:34.956089Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:34.956097Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.956103Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:34.956112Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:34.956119Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 21938, MsgBus: 1736 2026-01-07T17:49:44.496107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678338327205423:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:44.496136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:44.882124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:44.886930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:44.887029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:44.984971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678338327205317:2081] 1767808184483925 != 1767808184483928 2026-01-07T17:49:44.985644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:44.988713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:45.137518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:45.137557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:45.137567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:45.137631Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:45.143516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:45.528459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:45.612037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:45.618631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:47.954147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678351212108087:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.954243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678351212108100:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.954350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.955060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678351212108118:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.955135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:47.958310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:47.968424Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678351212108117:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:48.076781Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678355507075466:2548] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:48.374890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:48.488010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.496473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678338327205423:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:49.496567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:49.539469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 26241, MsgBus: 24489 2026-01-07T17:49:53.163541Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678374729374946:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:53.163593Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:53.172564Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:53.298894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:53.314275Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:53.318288Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678374729374919:2081] 1767808193162756 != 1767808193162759 2026-01-07T17:49:53.334882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:53.334953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:49:53.337795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:53.422348Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:53.422366Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:53.422376Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:53.422454Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:53.571440Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:53.814430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:53.818173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:54.169285Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:56.528412Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678387614277693:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.528536Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.529032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678387614277706:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.529077Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678387614277705:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.529155Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource po ... cal=44;result=not_found; 2026-01-07T17:50:30.961560Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.961581Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.965451Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.965532Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.965553Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.971589Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.971657Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.971674Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.975523Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.975604Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.975622Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.981148Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.981199Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.981218Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.984507Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.984575Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.984593Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.988983Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.989053Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.989071Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.993163Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.993239Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.993257Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.998939Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.999013Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:30.999033Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.002118Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.002186Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.002205Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.008167Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.008254Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.008274Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:31.115146Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs9cs1b2dye4s3mhfyjpxw", SessionId: ydb://session/3?node_id=4&id=Mjk1NGVlZGYtY2RhMGMxMDktZTFkODA4YjgtZThlMGM2OWU=, Slow query, duration: 10.230962s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:32.245083Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:32.245116Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:33.359386Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;commit_tx_id=281474976715668;commit_lock_id=281474976715665;fline=manager.cpp:80;broken_lock_id=281474976715666; 2026-01-07T17:50:33.427970Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7592678547749973867:4630], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9vxyfvpx0jqknetrqws3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=N2YwYjhiNzUtNjQ4N2U3OTEtN2ZjNDcyNGQtMjVhYjM5ZTA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-07T17:50:33.428102Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7592678547749973867:4630], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9vxyfvpx0jqknetrqws3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=N2YwYjhiNzUtNjQ4N2U3OTEtN2ZjNDcyNGQtMjVhYjM5ZTA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-07T17:50:33.429330Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=N2YwYjhiNzUtNjQ4N2U3OTEtN2ZjNDcyNGQtMjVhYjM5ZTA=, ActorId: [4:7592678539160037527:3617], ActorState: ExecuteState, LegacyTraceId: 01kecs9vxyfvpx0jqknetrqws3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSystemView::PartitionStatsParametricRanges >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> KqpSysColV1::SelectRowById >> KqpSystemView::NodesRange2 >> KqpSystemView::Join >> KqpSystemView::QueryStatsScan >> KqpSystemView::QueryStatsSimple >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView >> KqpSystemView::CompileCacheQueriesOrderByDesc >> KqpSystemView::Sessions-EnableRealSystemViewPaths >> KqpSystemView::PartitionStatsFollower >> KqpSysColV1::SelectRange >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSysColV1::StreamInnerJoinTables >> KqpSysColV1::StreamSelectRowById >> KqpSysColV0::UpdateAndDelete >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TScaleRecommenderTest::BasicTest [GOOD] >> TScaleRecommenderTest::RollingRestart >> THiveTest::TestServerlessMigration [GOOD] >> THiveTest::TestUpdateChannelValues >> KqpSysColV0::SelectRowAsterisk >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:50:21.541211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:21.759025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:21.795363Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:21.796050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:21.796111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:22.298805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:22.298957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:22.391436Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808218607654 != 1767808218607658 2026-01-07T17:50:22.411833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:22.462940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:22.607514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:22.938815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-07T17:50:22.943539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:22.945225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:22.945972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T17:50:22.947644Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:50:22.947683Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-07T17:50:22.958979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:22.985253Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-07T17:50:22.985371Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-07T17:50:23.057712Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-07T17:50:23.057814Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-07T17:50:23.058045Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-07T17:50:23.058468Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-07T17:50:23.058537Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-07T17:50:23.058978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:23.060103Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-07T17:50:23.060223Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-07T17:50:23.060296Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-07T17:50:23.060345Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-07T17:50:23.060968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-07T17:50:23.061038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-07T17:50:23.061867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-07T17:50:23.072531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T17:50:23.077032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:23.077117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:23.078044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-07T17:50:23.083602Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-07T17:50:23.100633Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-07T17:50:23.100740Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-07T17:50:23.101287Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-07T17:50:23.101361Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-07T17:50:23.101438Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-07T17:50:23.101637Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-07T17:50:23.102180Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-07T17:50:23.102377Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-07T17:50:23.103078Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-07T17:50:23.109146Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-07T17:50:23.109286Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136553954136864}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-07T17:50:23.109362Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136553954136864}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-07T17:50:23.109519Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136553954136864}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-07T17:50:23.109608Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-07T17:50:23.109659Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-07T17:50:23.109697Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-07T17:50:23.109896Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-07T17:50:23.109949Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-07T17:50:23.110029Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-07T17:50:23.110135Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-07T17:50:23.110244Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-07T17:50:23.110348Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-07T17:50:23.110505Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... 94037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-07T17:50:37.731074Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-07T17:50:37.731781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T17:50:37.731862Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-07T17:50:37.731910Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-07T17:50:37.731994Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-07T17:50:37.732498Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-07T17:50:37.743469Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-07T17:50:37.754204Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-07T17:50:37.775671Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2026-01-07T17:50:37.775850Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 34500 2026-01-07T17:50:37.775964Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2026-01-07T17:50:37.776176Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:37.776417Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715666 ssId 72057594046644480 seqNo 2:4 2026-01-07T17:50:37.776470Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715666 at tablet 72075186224037889 2026-01-07T17:50:37.776679Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:50:37.776886Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 
planned 1 2026-01-07T17:50:37.776975Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:37.777010Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:50:37.777054Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-07T17:50:37.787786Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:37.787906Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:37.789404Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2026-01-07T17:50:37.789475Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715666 step# 35000 Status# 16 SEND to# [2:400:2399] Proxy marker# C1 2026-01-07T17:50:37.851655Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715666 has been planned 2026-01-07T17:50:37.851756Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2026-01-07T17:50:37.851797Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2026-01-07T17:50:37.852065Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 35500 in 0.500000s at 35.450000s 2026-01-07T17:50:37.852463Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 35000, txid# 281474976715666 marker# C2 2026-01-07T17:50:37.852537Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715666 stepId# 35000 Status# 17 SEND EvProposeTransactionStatus to# [2:400:2399] Proxy 2026-01-07T17:50:37.852977Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 35000, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:37.853591Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715666 at step 35000 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 35000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-07T17:50:37.853639Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:50:37.853908Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:37.853955Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:50:37.853998Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [35000:281474976715666] in PlanQueue unit at 72075186224037889 2026-01-07T17:50:37.854205Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 35000:281474976715666 keys extracted: 0 2026-01-07T17:50:37.854331Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:50:37.854526Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T17:50:37.854600Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-07T17:50:37.854998Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:50:37.857243Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 35000} 2026-01-07T17:50:37.857314Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:37.857604Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-07T17:50:37.857697Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-07T17:50:37.857746Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-07T17:50:37.857776Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2026-01-07T17:50:37.857816Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 acknowledged 2026-01-07T17:50:37.858167Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T17:50:37.858242Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [35000 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:50:37.858293Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2026-01-07T17:50:37.858367Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T17:50:37.859126Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2026-01-07T17:50:37.861925Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2026-01-07T17:50:37.861996Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T17:50:37.862637Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2026-01-07T17:50:37.862737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2026-01-07T17:50:37.863746Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2026-01-07T17:50:37.864738Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T17:50:37.879306Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:50:37.879477Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:50:37.880945Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:37.881815Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-07T17:50:37.882261Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-07T17:50:37.882318Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-07T17:50:37.882412Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-07T17:50:37.882533Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-07T17:50:37.882633Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> KqpSysColV1::SelectRowAsterisk >> KqpSystemView::FailNavigate |94.5%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TScaleRecommenderTest::RollingRestart [GOOD] >> TScaleRecommenderTest::RollingRestartNoLastRecommendation >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveForwarding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 5929, MsgBus: 14376 2026-01-07T17:49:26.442427Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678258583670847:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.442487Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.515507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:26.889002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.889380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.889487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.937610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.996501Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678258583670745:2081] 1767808166438241 != 1767808166438244 2026-01-07T17:49:27.012495Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:27.161871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.182817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.182834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.182847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.182930Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.452497Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:49:27.900570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.911372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.621723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678271468573521:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.621747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678271468573529:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.621858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.622096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678271468573536:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.622164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.624888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.634816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678271468573535:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.759293Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678271468573588:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.120913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:30.320264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.323914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:30.324165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.324383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.324481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.324572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.324699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.324789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.324878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:30.325305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:30.325409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:30.325510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:30.325596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:30.325717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678275763541059:2343];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:30.326337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:30.326509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:30.326606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:30.326703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:30.326826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:30.326931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678275763541053:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:30.327050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:75926782757 ... 
1-07T17:50:35.084846Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.084943Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.084966Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.099065Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.099150Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.099177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.102367Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.102426Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.102449Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.111156Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.111229Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.111254Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.114425Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.114491Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.114513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.132529Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.132956Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.132985Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.140738Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.140818Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.140842Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.149460Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.149540Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.149561Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.156889Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.160958Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.161001Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.162824Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.162889Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.162908Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.173985Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.174172Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.174200Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:35.223798Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs9dmze9w3rm05sgfvz5xm", SessionId: ydb://session/3?node_id=4&id=MTA5MTViMzktNjBiZDcxMWItNGZkZTUxNWUtODU0NmJkOTY=, Slow query, duration: 12.465895s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:50:36.011839Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-07T17:50:36.228333Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7592678559589409955:3683], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9yfk1k37q7r218rdpcng. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=N2ViMjUyYmMtNjE3Njg1NDgtMzFiNmJiNmEtZWQwY2U3OTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-07T17:50:36.228445Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7592678559589409955:3683], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kecs9yfk1k37q7r218rdpcng. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=N2ViMjUyYmMtNjE3Njg1NDgtMzFiNmJiNmEtZWQwY2U3OTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-07T17:50:36.228977Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=N2ViMjUyYmMtNjE3Njg1NDgtMzFiNmJiNmEtZWQwY2U3OTE=, ActorId: [4:7592678555294442558:3652], ActorState: ExecuteState, LegacyTraceId: 01kecs9yfk1k37q7r218rdpcng, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TScaleRecommenderTest::RollingRestartNoLastRecommendation [GOOD] >> TStorageBalanceTest::TestScenario1 >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2026-01-07T17:50:13.535710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678462343929881:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.535760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.648435Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011127s 2026-01-07T17:50:14.028288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:14.083743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:14.087469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:14.113980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:14.284454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.305024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.416280Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006991s 2026-01-07T17:50:14.562905Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.563435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.563442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.563456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.563525Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.098376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:15.117793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:17.307400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800413:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.307513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.307845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800423:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.307881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.534023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.713842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800602:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.713931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.721112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800605:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.721271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.742621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-07T17:50:17.875203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800703:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.875317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.875331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800708:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.875565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800710:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.875614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.887457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.887737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.887766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2026-01-07T17:50:17.887884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.887905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715660:2, at schemeshard: 72057594046644480 2026-01-07T17:50:17.887977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.888019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:3, path# /Root/.metadata/workload_manager/pools/default 2026-01-07T17:50:17.888288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715660:3 1 -> 128 2026-01-07T17:50:17.888607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.888647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:17.891718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800744:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.893039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.893185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800764:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.893251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800765:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.893261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2026-01-07T17:50:17.893287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479523800766:2396], DatabaseId: /Root, PoolId: default ... 24037890, state: WaitScheme 2026-01-07T17:50:37.754919Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-07T17:50:37.759653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037890 2026-01-07T17:50:37.768055Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:50:37.768161Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:50:37.768319Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:50:37.773488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037889 2026-01-07T17:50:37.773532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 3 -> 131 2026-01-07T17:50:37.776122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:37.796396Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-07T17:50:37.796560Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:50:37.796622Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:50:37.796657Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-07T17:50:37.796878Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-07T17:50:37.796947Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-07T17:50:37.797004Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:50:37.797046Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:50:37.797073Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T17:50:37.797653Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:50:37.806205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710693 TabletId: 72075186224037888 2026-01-07T17:50:37.806508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 131 -> 132 2026-01-07T17:50:37.808352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:50:37.808567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:50:37.808629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:50:37.809668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-01-07T17:50:37.809717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-01-07T17:50:37.809736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-07T17:50:37.815088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-07T17:50:37.815097Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-07T17:50:37.815209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:50:37.815259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:50:37.815328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-01-07T17:50:37.821341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-01-07T17:50:37.821361Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:37.821622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:50:37.827303Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:50:37.827911Z node 
1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:50:37.828420Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:50:37.828535Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:50:37.829339Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-07T17:50:37.829433Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-07T17:50:37.829458Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-07T17:50:37.875582Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:37.875625Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:37.875649Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.081738Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.081738Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.081794Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.429346Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.429399Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:38.429426Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:39.006895Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:39.006961Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:39.006990Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:39.768921Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:39.768992Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:40.017027Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:40.669408Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:40.669445Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:40.669465Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:41.327212Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725935, task: 1, CA Id [1:7592678565423250333:2375]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:41.327324Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725938, task: 1, CA Id [1:7592678565423250372:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-07T17:50:41.327378Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976725936, task: 1, CA Id [1:7592678565423250349:2374]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 Table has 2 shards |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 19526, MsgBus: 9037 2026-01-07T17:49:52.779220Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678373639089082:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:52.779321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:53.141007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:53.142500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:53.142592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:53.176624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:53.253048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678373639088895:2081] 1767808192724711 != 1767808192724714 2026-01-07T17:49:53.276471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:53.394181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:53.413064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:53.413091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:53.413096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:53.413191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:53.780997Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:54.028059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:54.045059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:56.130630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7592678390818958971:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.130746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.131138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678390818958983:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.131201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678390818958984:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.131338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:56.135091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:56.148760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678390818958987:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:56.254701Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678390818959038:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:56.520110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:56.709989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:56.709989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:56.710198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:56.710219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:56.710443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:56.710538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:56.710646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:56.710760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:56.710769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:56.710884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:56.710892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:56.710996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:56.711548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:56.711656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:56.711743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:56.711822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:56.711915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678390818959213:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:56.712106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:56.712262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:56.712356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:56.712463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592678390818959278:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:56.712588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592678542185847685:2964] from: [3:7592678542185847684:2964] trace_id# 2026-01-07T17:50:32.012585Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592678542185847685:2964] TxId: 281474976715665. Ctx: { TraceId: 01kecs9tf31bdxv438wyd3yeeh, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDNlOWQyYS1kNDVkNWZjMi04NThlNDdhYy1hNTYxNGFhYg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:50:32.012885Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDNlOWQyYS1kNDVkNWZjMi04NThlNDdhYy1hNTYxNGFhYg==, ActorId: [3:7592678537890880342:2964], ActorState: ExecuteState, LegacyTraceId: 01kecs9tf31bdxv438wyd3yeeh, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:50:32.014135Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715665; 2026-01-07T17:50:32.014349Z node 3 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767808232057 : 281474976715665] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 6045, MsgBus: 24410 2026-01-07T17:50:33.642644Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678548690183384:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:33.642896Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:33.729181Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:33.729337Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.009281s 2026-01-07T17:50:33.845026Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678548690183276:2081] 1767808233622722 != 1767808233622725 2026-01-07T17:50:33.887606Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:33.898121Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:33.898212Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:33.929138Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:33.945298Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:34.061616Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:34.061646Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:34.061657Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:34.061755Z node 4 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:34.647199Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:34.722118Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:38.010822Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678570165020636:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:38.010822Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678570165020649:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:38.010900Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:38.011177Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678570165020654:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:38.011229Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:38.014695Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:38.024187Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678570165020653:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:50:38.103049Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678570165020706:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:38.168126Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:38.264018Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:39.182914Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678548690183384:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:39.258031Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:39.607968Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.053528Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2026-01-07T17:50:42.054962Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7592678587344898002:2964], SessionActorId: [4:7592678583049930649:2964], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[4:7592678587344898002:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-07T17:50:42.055055Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7592678587344898002:2964], SessionActorId: [4:7592678583049930649:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7592678583049930649:2964]. 2026-01-07T17:50:42.055234Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ZjNiNjhjNTYtNGE2YWRlMGItNWYzY2JjYzItM2QzZTVlNWE=, ActorId: [4:7592678583049930649:2964], ActorState: ExecuteState, LegacyTraceId: 01kecsa49893ge8p6s0x7m1413, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7592678587344898003:2964] from: [4:7592678587344898002:2964] trace_id# 2026-01-07T17:50:42.055347Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7592678587344898003:2964] TxId: 281474976710666. Ctx: { TraceId: 01kecsa49893ge8p6s0x7m1413, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZjNiNjhjNTYtNGE2YWRlMGItNWYzY2JjYzItM2QzZTVlNWE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T17:50:42.055626Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ZjNiNjhjNTYtNGE2YWRlMGItNWYzY2JjYzItM2QzZTVlNWE=, ActorId: [4:7592678583049930649:2964], ActorState: ExecuteState, LegacyTraceId: 01kecsa49893ge8p6s0x7m1413, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T17:50:42.058616Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2026-01-07T17:50:42.058751Z node 4 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767808242102 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSysColV1::SelectRowById [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> KqpSysColV0::SelectRowById >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSysColV1::SelectRange [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 23670, MsgBus: 15537 2026-01-07T17:50:38.511559Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678570811369576:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.511633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.977143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.990623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.990743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.066421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.140153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.204588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.322791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.322825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.322859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.322952Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.525241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.171661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.239477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.472259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.709696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.788416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.509907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587991240573:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.510040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.521111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587991240583:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.521224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.090312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.138575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.177365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.204387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.246190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.322220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.380063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.458470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592286208754:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.458545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.458679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592286208759:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.458711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592286208760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.458814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.462176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.472245Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678592286208763:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.511722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678570811369576:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.511831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.557071Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592286208817:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2026-01-07T17:50:13.445593Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678462807936074:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.451951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.688228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.883157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.883243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.927625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:13.963817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:13.967482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.453087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.545573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.545596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.545611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.545707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.132484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:15.145719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:17.003146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806609:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.003252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.003606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806619:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.003693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.534004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.707807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806800:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.707910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.708134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806802:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.708167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.742626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-07T17:50:17.833062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806906:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806929:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806930:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806933:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806931:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806932:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806934:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806928:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.833997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.835156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806960:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.835162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806956:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.835208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.836128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806972:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.836249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.836950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987806989:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.838340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987807002:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.838400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.839087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479987807017:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool ... : {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-07T17:50:38.110965Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YTNkYWQ0YTUtMzc3MTcxNmEtYmE3ODU0NDYtNjViODZkZA==, ActorId: [1:7592678479987806899:2378], ActorState: ExecuteState, LegacyTraceId: 01kecsa0gd6ryp0aqh6fecn020, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-07T17:50:38.113833Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-07T17:50:38.113872Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-07T17:50:38.117044Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:807: SelfId: [1:7592678570182200776:2371], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7592678479987806892:2371]Got WRONG SHARD STATE for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7592678570182200776:2371].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2026-01-07T17:50:38.117177Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678570182200751:2371], SessionActorId: [1:7592678479987806892:2371], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7592678479987806892:2371]. 2026-01-07T17:50:38.117424Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MTM3ODA2NC04M2MxNjEyLWU2YzRkMmY2LWI1M2VhMjEx, ActorId: [1:7592678479987806892:2371], ActorState: ExecuteState, LegacyTraceId: 01kecsa0gj4dfe9xjp0z4g2nqj, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7592678570182200753:2371] from: [1:7592678570182200751:2371] trace_id# 2026-01-07T17:50:38.117531Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678570182200753:2371] TxId: 281474976722858. Ctx: { TraceId: 01kecsa0gj4dfe9xjp0z4g2nqj, Database: , SessionId: ydb://session/3?node_id=1&id=MTM3ODA2NC04M2MxNjEyLWU2YzRkMmY2LWI1M2VhMjEx, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-07T17:50:38.117796Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3ODA2NC04M2MxNjEyLWU2YzRkMmY2LWI1M2VhMjEx, ActorId: [1:7592678479987806892:2371], ActorState: ExecuteState, LegacyTraceId: 01kecsa0gj4dfe9xjp0z4g2nqj, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/Foo`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-07T17:50:38.119204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-07T17:50:38.119276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-07T17:50:38.119313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-07T17:50:38.119364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-07T17:50:38.121268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 UNAVAILABLE:
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 2026-01-07T17:50:38.209703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.209877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999790s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.214596Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678462807936426:2204] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-07T17:50:38.215960Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678462807936426:2204] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-07T17:50:38.262025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 52, next wakeup# 14.947656s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.265720Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-07T17:50:38.275040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 65, next wakeup# 14.934638s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.279617Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-07T17:50:38.287178Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-07T17:50:38.296314Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:38.301386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:50:38.309569Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:50:38.309988Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:50:38.310647Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:50:38.310759Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:50:38.362719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 
72075186224037889, compactionInfo# {72057594046644480:2, SH# 3, Rows# 3691, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 583.282410s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.362885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.846783s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.363410Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037889 tableId# 38 localTid# 1001, requested from [1:7592678462807936426:2204], partsCount# 2, memtableSize# 3088, memtableWaste# 1520, memtableRows# 23 2026-01-07T17:50:38.364416Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678462807936426:2204] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-07T17:50:38.364498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 1, next wakeup# 14.845183s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:38.432641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 69 ms, with status# 0, next wakeup in# 583.212499s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:42.364888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 3979, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 579.280249s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:50:42.365496Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037890 tableId# 38 localTid# 1001, requested from [1:7592678462807936426:2204], partsCount# 2, memtableSize# 2752, memtableWaste# 1856, memtableRows# 20 2026-01-07T17:50:42.471238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 106 ms, with status# 0, next wakeup in# 579.173903s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 2 shards, running# 0 shards at schemeshard 72057594046644480 Table has 2 shards |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:50:23.616920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:23.734437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:50:23.754564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:50:23.755176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:23.755244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:50:24.075381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:24.075544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:24.172683Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808220730241 != 1767808220730245 2026-01-07T17:50:24.185516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:24.236350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:24.386255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:50:24.790889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:24.805556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:24.940257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:25.336612Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2026-01-07T17:50:25.338836Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-07T17:50:25.362115Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.022880s, errors=0 2026-01-07T17:50:25.362444Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 
2026-01-07T17:50:25.362558Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2026-01-07T17:50:25.363895Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:367: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-07T17:50:25.365370Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:401: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started fullscan actor# [1:964:2822] 2026-01-07T17:50:25.365595Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 100 2026-01-07T17:50:25.365672Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-07T17:50:25.366015Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-07T17:50:25.370716Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.004533s, sampled# 100, iter finished# 1, oks# 100 2026-01-07T17:50:25.370928Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:417: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 100 2026-01-07T17:50:25.371229Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:446: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started# 10 actors each with inflight# 1 2026-01-07T17:50:25.371330Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called 2026-01-07T17:50:25.371396Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371466Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called 2026-01-07T17:50:25.371495Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371527Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called 2026-01-07T17:50:25.371563Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371607Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called 2026-01-07T17:50:25.371635Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371720Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} Bootstrap called 2026-01-07T17:50:25.371744Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 
2026-01-07T17:50:25.371774Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} Bootstrap called 2026-01-07T17:50:25.371810Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371847Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} Bootstrap called 2026-01-07T17:50:25.371881Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371911Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} Bootstrap called 2026-01-07T17:50:25.371935Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.371975Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} Bootstrap called 2026-01-07T17:50:25.372001Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.372026Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} Bootstrap called 2026-01-07T17:50:25.372048Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-07T17:50:25.374372Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} session: ydb://session/3?node_id=1&id=MTU5YzRjOC1jZjU0ZDJhYS04YjRlYWI5Zi03YjhkY2FiMw== 2026-01-07T17:50:25.376781Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} session: ydb://session/3?node_id=1&id=MzQ2ZTg0NjktMjc1ZGRiYzItYjM2OWE1OTAtZmQwMzJjOGI= 2026-01-07T17:50:25.381150Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} session: ydb://session/3?node_id=1&id=NDlhZTk4ZmYtODg5Yzk2M2ItMmUyMGVlYy0yYjQ5YzBmZQ== 2026-01-07T17:50:25.384141Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} session: ydb://session/3?node_id=1&id=M2M1ODIyZmYtNTU3NDUwNzItZmFhNmQzYjAtZTE3YjZlYTc= 2026-01-07T17:50:25.386310Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} session: ydb://session/3?node_id=1&id=OTI2OTUxMzItNjYzYTdlZDMtN2FlMzI2YTEtODFmMjhhNGI= 2026-01-07T17:50:25.388284Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} session: ydb://session/3?node_id=1&id=YjcwN2EwOWMtOWUzMDE3MzMtN2YxNTAwZGEtNzZiZjc0NjM= 2026-01-07T17:50:25.392443Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} session: ydb://session/3?node_id=1&id=ZjRlMWExN2ItZTA3ZmU1ODAtZmQ4YzRlOGItNTM1ZGEzMWE= 2026-01-07T17:50:25.392605Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} session: 
ydb://session/3?node_id=1&id=MWQwZGRiNS0yYzE2ODk3MC0yMjc0NjlkMi0xNGQ5ZTMz 2026-01-07T17:50:25.394609Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} session: ydb://session/3?node_id=1&id=MTMwZTQ4NzUtMzNhYjE0YmEtZjIxNWI4YTktY2ZjYzhiOTk= 2026-01-07T17:50:25.396439Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} session: ydb://session/3?node_id=1&id=MWU1NjJlMjEtMjIyZDgxOGItYmU2Zjk4NGQtODg1ODAyZTU= 2026-01-07T17:50:25.402446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:988:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don' ... _manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:38.016679Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1081:2924] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:38.017278Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1082:2925] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:38.017879Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1086:2926] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-07T17:50:38.142316Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1049:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142417Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1050:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142470Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1051:2903], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142521Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1052:2904], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142595Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1053:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142657Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1054:2906], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142712Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1055:2907], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142766Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1057:2909], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142831Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1062:2914], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.142900Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1064:2916], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:50:38.178132Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1188:3012] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:38.601919Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 0.632277s, errors=0 2026-01-07T17:50:38.602163Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 4 { Tag: 4 DurationMs: 632 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:38.616722Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2117:3434] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:39.059814Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 6} finished in 1.086736s, errors=0 2026-01-07T17:50:39.060154Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 6 { Tag: 6 DurationMs: 1086 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:39.079294Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3024:3840] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:39.512584Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 8} finished in 1.534517s, errors=0 2026-01-07T17:50:39.512883Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 8 { Tag: 8 DurationMs: 1534 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:39.530189Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3931:4246] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:40.063176Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 11} finished in 2.081640s, errors=0 2026-01-07T17:50:40.063537Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 11 { Tag: 11 DurationMs: 2081 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:40.082821Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4838:4652] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:40.781663Z node 2 :DS_LOAD_TEST NOTICE: 
kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 7} finished in 2.806978s, errors=0 2026-01-07T17:50:40.781984Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 7 { Tag: 7 DurationMs: 2806 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:40.801569Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5745:5058] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:41.591306Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 3.623379s, errors=0 2026-01-07T17:50:41.591773Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 3 { Tag: 3 DurationMs: 3623 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:41.612249Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:6652:5464] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:42.494600Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 5} finished in 4.523219s, errors=0 2026-01-07T17:50:42.496220Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 5 { Tag: 5 DurationMs: 4523 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:42.516712Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7559:5870] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:43.545284Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 10} finished in 5.565464s, errors=0 2026-01-07T17:50:43.545773Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 10 { Tag: 10 DurationMs: 5565 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:43.564688Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:8466:6276] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:44.465487Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 6.499505s, errors=0 2026-01-07T17:50:44.466027Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 2 { Tag: 2 DurationMs: 6499 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:44.486661Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:9373:6682] txid# 281474976716577, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.426954Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 9} finished in 7.448771s, errors=0 2026-01-07T17:50:45.427496Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 9 { Tag: 9 DurationMs: 7448 OperationsOK: 100 OperationsError: 0 } 2026-01-07T17:50:45.427575Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:481: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 7.464654s, oks# 1000, errors# 0 2026-01-07T17:50:45.428023Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 >> RetryPolicy::RetryWithBatching [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 63657, MsgBus: 10206 2026-01-07T17:50:38.496568Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678571653945588:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.496619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.824515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.861949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.862074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.959808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:38.966623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:38.971572Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678571653945544:2081] 1767808238494145 != 1767808238494148 2026-01-07T17:50:39.205006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.283680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.283714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.283720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.283793Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.507976Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.179599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.186754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.242034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.455425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.695378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.778606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.650088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588833816600:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.650232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.651162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588833816610:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.651211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.064118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.111737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.186835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.221343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.253654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.287744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.316265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.370163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.442454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593128784777:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.442536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.442949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593128784782:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.443000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593128784783:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.443035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.446785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.463483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678593128784786:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.496853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678571653945588:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.496920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.557369Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678593128784838:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2026-01-07T17:50:13.456033Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678462431789823:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.456285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.508282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:13.817029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.914243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.914342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.933918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:14.036770Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.122381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.460066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.547526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.547545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.547562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.547631Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.086812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:17.190113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660353:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.190251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.190811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660363:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.190868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.535990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.746791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660542:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.746890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.747358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660545:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.747403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.768973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-07T17:50:17.882322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660650:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.882473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.882741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660671:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.882785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660672:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.882921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660673:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660675:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660674:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660676:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660680:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660687:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.883567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.884499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660705:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.884502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660714:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.884584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.886159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660743:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.886217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660745:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.886221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.886794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479611660751:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.886863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed ... 075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-07T17:50:37.896490Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678565511085761:2367], SessionActorId: [1:7592678479611660630:2367], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7592678479611660630:2367]. 2026-01-07T17:50:37.896613Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MjVlZWU3YS0yNDI1NTUwLWM2ZjI4YzM1LTZiMmVhYzgx, ActorId: [1:7592678479611660630:2367], ActorState: ExecuteState, LegacyTraceId: 01kecsa08sa1pr0gw09hgjehz3, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7592678565511085762:2367] from: [1:7592678565511085761:2367] trace_id# 2026-01-07T17:50:37.896672Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678565511085762:2367] TxId: 281474976722733. Ctx: { TraceId: 01kecsa08sa1pr0gw09hgjehz3, Database: , SessionId: ydb://session/3?node_id=1&id=MjVlZWU3YS0yNDI1NTUwLWM2ZjI4YzM1LTZiMmVhYzgx, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-07T17:50:37.896842Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MjVlZWU3YS0yNDI1NTUwLWM2ZjI4YzM1LTZiMmVhYzgx, ActorId: [1:7592678479611660630:2367], ActorState: ExecuteState, LegacyTraceId: 01kecsa08sa1pr0gw09hgjehz3, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-07T17:50:37.897430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-01-07T17:50:37.897464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-01-07T17:50:37.897482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-07T17:50:37.898001Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7592678565511085837:2373], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7592678479611660636:2373]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7592678565511085837:2373]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-07T17:50:37.898083Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678565511085763:2373], SessionActorId: [1:7592678479611660636:2373], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7592678479611660636:2373]. 2026-01-07T17:50:37.898213Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTBmNmRkYjUtN2U5OGZmNTEtOGNkOWM3NGEtZGU3ZDQ1MDc=, ActorId: [1:7592678479611660636:2373], ActorState: ExecuteState, LegacyTraceId: 01kecsa08xfqx88dh243qqb708, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7592678565511085764:2373] from: [1:7592678565511085763:2373] trace_id# 2026-01-07T17:50:37.898277Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678565511085764:2373] TxId: 281474976722734. Ctx: { TraceId: 01kecsa08xfqx88dh243qqb708, Database: , SessionId: ydb://session/3?node_id=1&id=OTBmNmRkYjUtN2U5OGZmNTEtOGNkOWM3NGEtZGU3ZDQ1MDc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-07T17:50:37.898461Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTBmNmRkYjUtN2U5OGZmNTEtOGNkOWM3NGEtZGU3ZDQ1MDc=, ActorId: [1:7592678479611660636:2373], ActorState: ExecuteState, LegacyTraceId: 01kecsa08xfqx88dh243qqb708, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-07T17:50:37.903029Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state);tx_id=0; 2026-01-07T17:50:37.903065Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state) 2026-01-07T17:50:37.903237Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7592678565511085631:2369], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7592678479611660632:2369]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7592678565511085631:2369]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-07T17:50:37.903317Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678565511085609:2369], SessionActorId: [1:7592678479611660632:2369], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7592678479611660632:2369]. 2026-01-07T17:50:37.903485Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MjRjNmMwNjgtZjY4YTk2MzMtMzUxM2Q1YWEtZWIyZmY0MWQ=, ActorId: [1:7592678479611660632:2369], ActorState: ExecuteState, LegacyTraceId: 01kecsa0721b29mvxvxx8c9z3k, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7592678565511085610:2369] from: [1:7592678565511085609:2369] trace_id# 2026-01-07T17:50:37.903584Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678565511085610:2369] TxId: 281474976722716. Ctx: { TraceId: 01kecsa0721b29mvxvxx8c9z3k, Database: , SessionId: ydb://session/3?node_id=1&id=MjRjNmMwNjgtZjY4YTk2MzMtMzUxM2Q1YWEtZWIyZmY0MWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-07T17:50:37.903817Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MjRjNmMwNjgtZjY4YTk2MzMtMzUxM2Q1YWEtZWIyZmY0MWQ=, ActorId: [1:7592678479611660632:2369], ActorState: ExecuteState, LegacyTraceId: 01kecsa0721b29mvxvxx8c9z3k, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-07T17:50:37.910268Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-07T17:50:37.910641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-07T17:50:37.910729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:50:37.910786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:50:37.910837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-01-07T17:50:37.913236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-01-07T17:50:37.915198Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:37.918624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:50:37.924149Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:50:37.925168Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:50:37.925293Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T17:50:37.925918Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Table has 2 shards |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 29437, MsgBus: 8920 2026-01-07T17:50:38.490895Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678570770510142:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.490973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.843938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.891823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.891938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.982736Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:38.985382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.061080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.282229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.282260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.282266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.282336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.499097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.097991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.109497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.173589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.427255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.634468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.730217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.275743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587950381163:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.275872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.280757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587950381172:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.280827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.133466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.170425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.212071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.251491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.293092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.335455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.388150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.475062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592245349343:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.475127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.475331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592245349348:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.475367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592245349349:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.475577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.478843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.488125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678592245349352:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.491105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678570770510142:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.491166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.570798Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592245349404:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.280285Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808245308, txId: 281474976710673] shutting down |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 26846, MsgBus: 5033 2026-01-07T17:50:38.490794Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678571835836578:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.494626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.913070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.915407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.915490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.943366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.055225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.084996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.285487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.285515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.285528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.285599Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.497268Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.127821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.175061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.383685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.546121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.628249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.273511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589015707534:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.273637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.281101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589015707543:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.281191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.123558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.167156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.201745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.236180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.281569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.314407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.357855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.441050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593310675717:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.441205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.441649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593310675722:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.441659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593310675723:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.441684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.445731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.455116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678593310675726:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.491184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678571835836578:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.491278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.538531Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678593310675780:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.740254Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808245742, txId: 281474976710673] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 1046, MsgBus: 22324 2026-01-07T17:50:38.512635Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678571143815773:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.512734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.850147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.917562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.917668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.992119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.063918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678571143815547:2081] 1767808238485943 != 1767808238485946 2026-01-07T17:50:39.069980Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.081109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.282343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.282367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.282373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:50:39.282429Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.501249Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.187118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.233799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.322811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.508341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.707317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.793757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.440185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588323686615:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.440301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.440570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588323686625:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.440621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.089530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.119770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.173638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.216461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.255727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.329069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.419611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.505072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592618654811:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.505141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.505181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592618654816:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.505466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592618654818:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.505554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.508577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.509371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678571143815773:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.509432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.516917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678592618654819:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:43.574311Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592618654874:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] >> KqpSysColV0::UpdateAndDelete [GOOD] >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 20167, MsgBus: 2141 2026-01-07T17:50:38.499297Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678568590443547:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.501303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.873867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.873985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.935043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.961498Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678568590443316:2081] 1767808238483418 != 1767808238483421 2026-01-07T17:50:38.967832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:38.975094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.229368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.281302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.281399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.281406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.281498Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.496225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.198617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.217071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.288115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.471300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.656322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.744859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.345068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585770314371:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.345193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.349143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585770314381:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.349208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.097192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.127493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.157406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.197752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.270373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.315377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.367348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.459834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590065282557:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.459919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.460098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590065282562:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.460151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590065282563:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.460195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.463724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.477951Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678590065282566:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.495732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678568590443547:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.495894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.583155Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678590065282618:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 63466, MsgBus: 15617 2026-01-07T17:50:39.414946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678573477322464:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:39.423938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:39.857101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.864809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.865075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.867431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:40.064605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:40.187258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:40.202920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:40.202949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:40.202956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:40.203032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:40.437038Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:41.008824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:41.017575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:41.071274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:41.223421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:41.411619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:41.535606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.307230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590657193421:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.307409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.308083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590657193431:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.308178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.588898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.630342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.672489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.705868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.743958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.805247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.849375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.901820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.995412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590657194301:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.995511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.995795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590657194307:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.995834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678590657194306:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.995868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.999576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:44.010770Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678590657194310:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:44.104817Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678594952161657:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:44.409936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678573477322464:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:44.410007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveImplTest::BootQueueConfigurePriorities [GOOD] >> THiveTest::TestBlockCreateTablet |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 32201, MsgBus: 10902 2026-01-07T17:50:38.506363Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678570640528363:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.506777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.555331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:39.021104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.065316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.065410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.113394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.127759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.133057Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678570640528213:2081] 1767808238483453 != 1767808238483456 2026-01-07T17:50:39.207470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.282891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T17:50:39.287405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.287417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.287529Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.512553Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.120434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.208339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.475924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.702576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.788646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.438998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587820399280:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.439116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.439387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587820399290:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.439425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.060048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.121698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.155694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.195266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.227412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.259406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.294773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.340388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.426285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592115367461:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.426379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.426934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592115367466:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.426980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592115367467:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.427096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.431321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.446485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678592115367470:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.498817Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678570640528363:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.498879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.511540Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592115367521:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.541200Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808245518, txId: 281474976710673] shutting down |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.5%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 2892, MsgBus: 6064 2026-01-07T17:50:38.509707Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678569941080859:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.509799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.929118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.982731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.982884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.005080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.095192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.097166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678569941080834:2081] 1767808238508283 != 1767808238508286 2026-01-07T17:50:39.263586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.286743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.286763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.286767Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.286852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.529058Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.166423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.180509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.255517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.454867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.628520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.714767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.577216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587120951894:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.577372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.578103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678587120951904:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.578193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.059344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.097511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.170792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.205938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.243845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.286253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.321478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.380309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.468572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678591415920073:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.468649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.469323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678591415920078:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.469328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678591415920079:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.469380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.473003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.483043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678591415920082:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.510907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678569941080859:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.510978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.582709Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678591415920138:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.162516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSysColV0::InnerJoinSelect |94.6%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 5128, MsgBus: 4698 2026-01-07T17:50:38.505182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678567819854960:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.505310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.929709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.950185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.950292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.102088Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678567819854744:2081] 1767808238483607 != 1767808238483610 2026-01-07T17:50:39.120334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.122732Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.171162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.283663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.283686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.283693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.283740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.513049Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.214072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.285926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.478267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.688797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.763945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.298889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678584999725801:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.298985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.299256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678584999725811:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.299284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.131606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.163609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.203578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.240254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.286593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.322254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.372636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.459555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589294693982:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.459620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.459698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589294693987:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.459950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589294693989:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.459993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.462524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.472011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678589294693990:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.501341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678567819854960:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.501413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.535556Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678589294694043:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2026-01-07T17:50:06.505862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678430645952245:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.508398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.578929Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.582368Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678433151385630:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:06.582406Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:06.676737Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:50:06.889021Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:06.915680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:07.081707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.081834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.089433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:07.089495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:07.116290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.117995Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:07.120429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:07.223745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:07.233042Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.247653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:07.519030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.586158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0015f1/r3tmp/yandex61IUal.tmp 2026-01-07T17:50:07.586183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0015f1/r3tmp/yandex61IUal.tmp 2026-01-07T17:50:07.599168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0015f1/r3tmp/yandex61IUal.tmp 2026-01-07T17:50:07.599275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:07.673166Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:07.889696Z INFO: TTestServer started on Port 64159 GrpcPort 5968 PQClient connected to localhost:5968 2026-01-07T17:50:08.280563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:08.474932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:10.868566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678447825822842:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.868642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678447825822832:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.868876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.869288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678447825822848:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.869344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:10.872342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:10.908741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678447825822847:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2026-01-07T17:50:11.006992Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678447825822936:3083] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:11.579160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678430645952245:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:11.581469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:11.589062Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678433151385630:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:11.589126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:11.602013Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592678452120790242:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:50:11.604658Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Nzc0YjA1Y2QtZDNjMWIyYjQtOGQwNDQ5NTItZjYzODM5YTE=, ActorId: [1:7592678447825822830:2331], ActorState: ExecuteState, LegacyTraceId: 01kecs95xjcjfkzv3jnrft7jq6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:50:11.606900Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:50:11.676291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:11.760656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:11.908737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex ... 
: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.153193Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153201Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.153228Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.153238Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153246Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.153270Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153283Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-07T17:50:46.153312Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:46.153327Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.153347Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.153395Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.153405Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153412Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.153423Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.153430Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-07T17:50:46.157973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:46.158012Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158025Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.158044Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-07T17:50:46.158120Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:46.158131Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158141Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.158153Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158163Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.158186Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.158196Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158205Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.158216Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.158226Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-07T17:50:46.253033Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253077Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253092Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253124Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T17:50:46.253181Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253194Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253203Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253216Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253226Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-07T17:50:46.253255Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253268Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253284Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253298Z node 1 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.253340Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253352Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253411Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253425Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-07T17:50:46.253457Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253470Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253482Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253509Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253520Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.253550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.253562Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253571Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.253582Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.253592Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-07T17:50:46.258160Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:46.258189Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258202Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.258222Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258233Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-07T17:50:46.258288Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-07T17:50:46.258301Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258311Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.258325Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258333Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-07T17:50:46.258357Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-07T17:50:46.258369Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258376Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.258388Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.258396Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 14664, MsgBus: 23306 2026-01-07T17:50:40.659979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678579454354965:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:40.660041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:41.053932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:41.095322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.095448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.138098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.212222Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:41.331134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.352867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:41.352884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:41.352890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:50:41.352990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:41.663418Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:41.824891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:41.834368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:41.881726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.039834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.220785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.291552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.841292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592339258603:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.841417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.841921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592339258613:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.841987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.213553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.263344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.305495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.345081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.388049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.445058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.491155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.576313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.657911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678596634226780:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.657976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.658365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678596634226786:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.658404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678596634226785:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.658516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.662380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:44.676332Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678596634226789:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:44.767523Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678596634226842:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.652426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678579454354965:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:45.652506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDownAfterDrain >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 24578, MsgBus: 28711 2026-01-07T17:50:41.096443Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678584387988026:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:41.096497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:41.464442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:41.509653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.509777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.612306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.633811Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678584387987796:2081] 1767808241066777 != 1767808241066780 2026-01-07T17:50:41.664045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:41.672696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.875107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:41.875145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:41.875153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-07T17:50:41.875244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:42.096465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:42.370983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:42.491134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.662726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.828135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.900293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.713967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678597272891563:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.714105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.714636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678597272891573:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.714703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.165627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.196252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.228083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.260121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.297560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.337893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.411162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.459585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.535160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678601567859748:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.535260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.535375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678601567859753:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.535552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678601567859755:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.535627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.539685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:45.549789Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678601567859757:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:45.609359Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678601567859808:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:46.097192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678584387988026:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:46.097268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:47.111076Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7592678610157794719:3984], for# user0@builtin, access# DescribeSchema 2026-01-07T17:50:47.111101Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7592678610157794719:3984], for# user0@builtin, access# DescribeSchema 2026-01-07T17:50:47.120036Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592678610157794707:2538], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:50:47.120509Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTMxODUwMWMtYjI2YWM3OWMtNGRjNGU4NWMtMzA0YjQ3MzU=, ActorId: [1:7592678610157794700:2534], ActorState: ExecuteState, LegacyTraceId: 01kecsa98e24k3xkecxc2hhgfx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/.sys/partition_stats]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 8389, MsgBus: 12166 2026-01-07T17:50:38.501570Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678572015714209:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.505045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.823202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.852482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.852618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.855879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:38.973735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:38.981127Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678572015714178:2081] 1767808238485765 != 1767808238485768 2026-01-07T17:50:38.988490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.289224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.289253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.289259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.289342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-07T17:50:39.517042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.164307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.180044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.239718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.446427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.752054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.831625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.273120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589195585251:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.273222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.280753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589195585260:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.280831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.094018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.149895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.186835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.218893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.256076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.286997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.326909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.443988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593490553426:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.444100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.444303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593490553431:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.444353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678593490553432:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.444391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.447710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.458352Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678593490553435:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:43.494794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678572015714209:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.494862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.521455Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678593490553486:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSystemView::FailResolve |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-07T17:44:44.556367Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.556403Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.556428Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:44:44.556768Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:44:44.556808Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.556829Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.558051Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007549s 2026-01-07T17:44:44.558598Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:44:44.558647Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.558674Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.558790Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006490s 2026-01-07T17:44:44.559327Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-07T17:44:44.559363Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.559401Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:44:44.559464Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007254s 2026-01-07T17:44:44.580226Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1767807884580198 2026-01-07T17:44:44.815819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592677048795495395:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:44.815877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:44.853037Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:44.854132Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592677051065053469:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:44:44.854281Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:44:44.867098Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:44:45.036728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:45.065068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:44:45.094922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:45.095055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:45.096877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:44:45.097059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:44:45.102821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:45.131968Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:44:45.134105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:44:45.180719Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:44:45.227234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:45.234183Z node 
2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:44:45.247492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003839/r3tmp/yandex91zagG.tmp 2026-01-07T17:44:45.247527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003839/r3tmp/yandex91zagG.tmp 2026-01-07T17:44:45.247737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003839/r3tmp/yandex91zagG.tmp 2026-01-07T17:44:45.247967Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:44:45.288545Z INFO: TTestServer started on Port 7865 GrpcPort 17305 PQClient connected to localhost:17305 2026-01-07T17:44:45.453864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:44:45.823944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:45.862385Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:44:47.778564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677061680398583:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.778655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677061680398591:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.778714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.779220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592677061680398598:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.779316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:44:47.781737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:44:47.797879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592677061680398597:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:44:48.084122Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592677061680398691:2960] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:44:48.113932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:44:48.115017Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592677063949955595:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:48.115546Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YTMxMWVkNzItMWVhN2FhNmEtNjI5NmRhMGYtNDYzYmIxMDM=, ActorId: [2:7592677063949955570:2300], ActorState: ExecuteState, LegacyTraceId: 01kecrzadv8x0z9vy0zj0e8ysz, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:44:48.117222Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592677065975366004:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:44:48.117691Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDU0OTA4MjctNGY0MmRlYWMtNzM2MzA1MC1mZTY5MjkzOA==, ActorId: [1:7592677061680398581:2328], ActorState: ExecuteState, LegacyTraceId: 01kecrzad1779pfqyr539cxt39, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { p ... ] Write session: gracefully shut down, all writes complete 2026-01-07T17:50:44.880817Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|da6b3aa9-6fa191d5-76f4eae7-ccb5a127_0] Write session: destroy 2026-01-07T17:50:44.880099Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' size 1208 2026-01-07T17:50:44.880184Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' 2026-01-07T17:50:44.889302Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|da6b3aa9-6fa191d5-76f4eae7-ccb5a127_0 grpc read done: success: 0 data: 2026-01-07T17:50:44.889337Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|da6b3aa9-6fa191d5-76f4eae7-ccb5a127_0 grpc read failed 2026-01-07T17:50:44.889380Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message-group-id|da6b3aa9-6fa191d5-76f4eae7-ccb5a127_0 grpc closed 2026-01-07T17:50:44.889416Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|da6b3aa9-6fa191d5-76f4eae7-ccb5a127_0 is DEAD 2026-01-07T17:50:44.891099Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:50:44.891332Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [17:7592678594514903340:2617] destroyed 2026-01-07T17:50:44.891378Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-07T17:50:44.891416Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:44.891441Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:44.891461Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:44.891489Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:44.891514Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:44.921832Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:44.921868Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:44.921895Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:44.921922Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:44.921937Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.022219Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.022268Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.022296Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.022331Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.022357Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.122565Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.122625Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.122651Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.122687Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.122710Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.225129Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.225199Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.225231Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-07T17:50:45.225278Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.225305Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.325197Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.325258Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.325280Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.325311Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.325339Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.425627Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.425685Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.425710Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.425742Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.425761Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.526627Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.526680Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.526706Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.526739Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.526761Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.626841Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.626907Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.626934Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.626968Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.626990Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.727170Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.727228Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.727246Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.727273Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.727293Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.827518Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.827561Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.827586Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.827616Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.827635Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:45.927947Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:45.927997Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.928025Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:45.928071Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:45.928092Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:50:46.028204Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:46.028244Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.028278Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:46.028310Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:50:46.028329Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |94.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> KqpSysColV1::StreamInnerJoinSelect >> KqpSystemView::QuerySessionsOrderByDesc |94.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSystemView::TopQueriesOrderByDesc >> KqpSystemView::PartitionStatsRange3 >> KqpSysColV1::UpdateAndDelete >> KqpSystemView::ReadSuccess >> KqpSinkMvcc::OlapNamedStatement [GOOD] |94.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::PartitionStatsOrderByDesc >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 21025, MsgBus: 20149 2026-01-07T17:50:38.488524Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678571501263058:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.488577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.833098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.878762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.878824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:38.890646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:38.981045Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678571501263026:2081] 1767808238484391 != 1767808238484394 2026-01-07T17:50:38.985227Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.098756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.282426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.282450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.282456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.282523Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.509314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.112499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.141081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 1 2026-01-07T17:50:42.973186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588681133172:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.973272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588681133184:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.973295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.973556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588681133187:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.973589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.986579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:42.999725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678588681133186:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:50:43.143945Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592976100535:2352] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:43.488567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678571501263058:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.488638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ydb-cpp-sdk/dev 2026-01-07T17:50:48.077178Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808248070, txId: 281474976715673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.6%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV1::StreamSelectRange >> KqpSysColV1::StreamSelectRowAsterisk >> KqpSystemView::PartitionStatsRange2 |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::Sessions+EnableRealSystemViewPaths |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange >> KqpSystemView::NodesOrderByDesc |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 26187, MsgBus: 27868 2026-01-07T17:50:42.922393Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678586441775721:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:42.922499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:43.139698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:43.139779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:43.187920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:43.202471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:43.205719Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:43.221077Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678586441775616:2081] 1767808242918448 != 1767808242918451 2026-01-07T17:50:43.374782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:43.374811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:43.374816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:43.374890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:43.443245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:43.814788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:43.867151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.932329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:44.029328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.237874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.308528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.185657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603621646680:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.185763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.186118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603621646690:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.186200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.573919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.606600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.633791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.666575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.695707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.733531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.766877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.809728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.891936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603621647558:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.892076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.892267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603621647564:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.892315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.892331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603621647563:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.896041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:46.906901Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678603621647567:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:47.011781Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678607916614916:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:47.921605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678586441775721:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:47.921673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:48.600740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple >> KqpSystemView::NodesRange2 [GOOD] >> KqpSystemView::NodesRange1 >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDrainAndReconnect >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 7882, MsgBus: 1186 2026-01-07T17:49:27.900822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678263508273553:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:27.900976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:27.945573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:28.169989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:28.198186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:28.198341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:28.200270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:28.289811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:28.293190Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678263508273506:2081] 1767808167898820 != 1767808167898823 
2026-01-07T17:49:28.421631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:28.421654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:28.421669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:28.421791Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:28.438779Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:28.829132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:28.835136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:28.907259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:30.804456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678276393176272:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.804542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678276393176256:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.804684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.807983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678276393176293:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.808096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:30.810604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:30.830728Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678276393176292:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:30.951794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678276393176346:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:31.267499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:31.427721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:31.427960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:31.428179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:31.428279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:31.428281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:31.428302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:31.428396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:31.428419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:31.428518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:31.428532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:31.428614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:31.428632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:31.428712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:31.428732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:31.428806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:31.428824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:31.428884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:31.428904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:31.428988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:31.429029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678280688143809:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:31.429055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7592678280688143898 ... 
10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.519513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.522687Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.522763Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.522785Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.533129Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.533219Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.533246Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.534029Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.534122Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.534149Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.546155Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.546241Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.546267Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.552671Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.552764Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.552789Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.558376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.558458Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.558481Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.564640Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.564739Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.564763Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.570132Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.570211Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.570235Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.578913Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.579014Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.579038Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.581983Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.582055Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.582092Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.592256Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.592364Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.592391Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.595894Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.596214Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.596247Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.608441Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.608548Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.608572Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:50:42.657845Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecs9nfh7v09615j5y7chgse", SessionId: ydb://session/3?node_id=4&id=ZWNhZWU4M2MtM2MxYjY5YmMtODU1MzM3MS0zMWUzZjMwMA==, Slow query, duration: 12.377847s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSysColV0::SelectRowById [GOOD] >> KqpSysColV1::InnerJoinTables >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> KqpSysColV0::InnerJoinTables >> KqpSystemView::PartitionStatsRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 30844, MsgBus: 17815 2026-01-07T17:50:38.731580Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007855s 2026-01-07T17:50:38.995543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678569074245136:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.995606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:39.158715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:39.222876Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678576226273808:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:39.222928Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:39.460364Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678575927445472:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:39.536545Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:40.079539Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.171431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-07T17:50:40.176616Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.181034Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.192080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:40.233383Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.228173Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.241519Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:40.261824Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:40.328541Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.388564Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.421443Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.421552Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:40.453101Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.457837Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.476798Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.539568Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:40.540658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.542519Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.580129Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.817013Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:40.877500Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:41.019506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.019625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.019805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.019836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.019932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.019978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.038058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.038134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.038876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:41.038946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:41.068051Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:50:41.068657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.069474Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:50:41.069590Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:41.069613Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:50:41.069709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.109895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.110140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.142966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:41.300771Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.302263Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.308585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.351512Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:41.746375Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:41.999321Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:42.055318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:42.055350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:42.055358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:42.061059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:43.419770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:43.577204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.831539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.995754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678569074245136:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.995818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:44.145509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.223080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678576226273808:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:44.223178Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:44.278669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:44.375505Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592678575927445472:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:44.375649Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:46.221117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603433985692:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.221249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.225046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678603433985702:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.225156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:46.606832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.661635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.720059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.769633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.815986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.862660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.922579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:46.997633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:47.109871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678607728953981:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:47.109952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:47.110278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678607728953987:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:47.110285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678607728953986:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:47.110342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:47.114036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:47.144210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678607728953990:2403], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:47.206241Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678607728954066:4592] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:49.190190Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808249183, txId: 281474976710673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 27111, MsgBus: 29724 2026-01-07T17:50:46.026877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678605157243636:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:46.027461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:46.300149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:46.300217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:46.335986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:46.353087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:46.357542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678605157243608:2081] 1767808246025105 != 1767808246025108 2026-01-07T17:50:46.359804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:46.469596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:46.469636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:46.469647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:46.469742Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:46.574418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:46.870488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:50:46.880477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:46.923745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:47.034203Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:47.060319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:47.219173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:47.284472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.051929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678618042147369:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.052062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.052412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678618042147379:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.052454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.339192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.367530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.400177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.431179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.468481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.500652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.561771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.605211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.685893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678618042148252:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.685963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.685966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678618042148257:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.686122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678618042148259:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.686160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:49.689108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:49.704126Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678618042148260:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:49.767636Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678618042148312:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:51.027211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678605157243636:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.027289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> THiveTest::TestDrainAndReconnect [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestAsyncReassign >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> KqpSysColV0::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 15552, MsgBus: 30224 2026-01-07T17:50:49.718506Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678618169237116:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:49.718867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:49.933087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.034870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.034935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.036979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678618169237006:2081] 1767808249709669 != 1767808249709672 2026-01-07T17:50:50.038571Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.044670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.146625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T17:50:50.146662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.146671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.146766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.161003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.639467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:50.654632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:50.721040Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.961365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631054139860:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.961365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631054139868:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.961476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.961736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631054139875:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.961827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.964960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:52.976009Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678631054139874:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:50:53.088833Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678635349107223:2580] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> KqpSystemView::FailResolve [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 9066, MsgBus: 4563 2026-01-07T17:50:38.489049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678571493857050:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.489129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.926488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.931947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.932044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.085064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678571493857018:2081] 1767808238484440 != 1767808238484443 2026-01-07T17:50:39.095018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.098423Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.155904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.281761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.281783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.281787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.281841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.505691Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.090435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.106273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.200285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.373494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.573985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.661185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.345920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588673728069:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.346025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.347750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678588673728079:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.347833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.058549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.094037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.123485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.155357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.228237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.266666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.338479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.380718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.469854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592968696252:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.469919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.470161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592968696257:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.470170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678592968696258:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.470206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.473496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.485582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678592968696261:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:43.488863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678571493857050:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.488984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.576766Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678592968696315:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:45.411401Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808245403, txId: 281474976715673] shutting down waiting... 2026-01-07T17:50:46.581341Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808246576, txId: 281474976715675] shutting down waiting... 2026-01-07T17:50:47.750132Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808247745, txId: 281474976715677] shutting down waiting... 2026-01-07T17:50:48.911535Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808248907, txId: 281474976715679] shutting down waiting... 2026-01-07T17:50:50.090055Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808250084, txId: 281474976715681] shutting down waiting... 2026-01-07T17:50:51.272641Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808251267, txId: 281474976715683] shutting down waiting... 2026-01-07T17:50:52.450730Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808252440, txId: 281474976715685] shutting down waiting... 
2026-01-07T17:50:53.641509Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808253636, txId: 281474976715687] shutting down 2026-01-07T17:50:53.861363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:53.861397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:54.064835Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808254049, txId: 281474976715689] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 21627, MsgBus: 19585 2026-01-07T17:50:48.185295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678614269310011:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:48.186225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:48.403277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:48.421953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:48.422052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:48.488013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:48.500157Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:48.586086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:48.586146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:48.586163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:48.586286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:48.667586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:49.039341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:49.094670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.192848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:49.227502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.391225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.457344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.386078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627154213730:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.386219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.386535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627154213740:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.386613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.781827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.815671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.844136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.880135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.920736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.969453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.026055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.068743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.132533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631449181911:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.132612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.132810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631449181917:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.132821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631449181916:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.132862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.135979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:52.146702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678631449181920:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:52.246942Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678631449181973:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:53.184980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678614269310011:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:53.185055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] >> THiveTest::TestAsyncReassign [GOOD] >> THiveTest::TestAlterFollower >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSysColV0::SelectRange [GOOD] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 6263, MsgBus: 1629 2026-01-07T17:50:48.053296Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678614499938513:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:48.053365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:48.254914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:48.264740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:48.264866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:48.315335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:48.361076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:48.424091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:48.424121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:48.424156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:48.424268Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:48.502911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:48.865622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:48.913662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.061165Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:49.063738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.224979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:49.289571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.221104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627384842228:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.221231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.221591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627384842237:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.221636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.558006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.601582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.633563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.662241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.692071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.722744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.756869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.803577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.874947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627384843104:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.875046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.876263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627384843109:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.876293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678627384843110:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.876371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:51.880076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:51.890413Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678627384843113:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:51.971673Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678627384843164:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:53.053637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678614499938513:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:53.053706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 1797, MsgBus: 26322 2026-01-07T17:50:49.683858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678616778251644:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:49.684092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:49.893116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:49.927241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:49.927542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:49.967487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:49.971733Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.085525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.085549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.085555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.085625Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.186960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.476167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:50.492877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:50.565473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:50.696426Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:50.767916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:50.930465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:50.994178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.647377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678629663155230:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.647560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.648002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678629663155240:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.648092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.892085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.920053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.948638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.983074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.016402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.055812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.096750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.173081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.266017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678633958123417:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.266090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.266513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678633958123422:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.266560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678633958123423:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.266789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.269901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:53.283491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678633958123426:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:53.375457Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678633958123477:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:54.682961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678616778251644:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:54.683037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.311437Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808255346, txId: 281474976710673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 7133, MsgBus: 5563 2026-01-07T17:50:49.570367Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678618163896964:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:49.571128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:49.829049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:49.838587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:49.838704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:49.935146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:49.954459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678618163896936:2081] 1767808249566901 != 1767808249566904 2026-01-07T17:50:49.974356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.007735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.041963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.041990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.042037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.042145Z node 
1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.595913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:50.597603Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:50.605158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:50.674981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:50.834309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.001514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.082369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.801520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631048800702:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.801636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.802133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678631048800712:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:52.802208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.112106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.145643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.183963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.221010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.258261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.300268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.335760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.404759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.509700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678635343768885:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.509787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.510172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678635343768890:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.510218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678635343768891:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.510256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.514558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:53.531113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678635343768894:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:53.588689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678635343768945:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:54.569040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678618163896964:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:54.569115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.175990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:55.359900Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7592678643933703926:4046], for# user0@builtin, access# SelectRow 2026-01-07T17:50:55.360027Z node 1 :KQP_EXECUTER ERROR: kqp_table_resolver.cpp:274: TxId: 281474976710675. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 30] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-07T17:50:55.371089Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MThiNzM4YTMtNjAyZmE1MGMtZjJmOGJjYS1kOTcwMTNmOQ==, ActorId: [1:7592678643933703899:2535], ActorState: ExecuteState, LegacyTraceId: 01kecsah7nasnd7e52ddvqyq6q, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Failed to resolve table `/Root/.sys/partition_stats` status: AccessDenied." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-07T17:50:55.371445Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808255358, txId: 281474976710674] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 17648, MsgBus: 12494 2026-01-07T17:50:50.051742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678619419186450:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.051936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.285464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.338405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.338516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.383445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.393520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.499720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.499738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.499744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.499820Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.582907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.943746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:50.950328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:50.993470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.053407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.155132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.318945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.393714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.240883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632304090152:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.241028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.241707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632304090162:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.241799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.573687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.616786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.647494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.674643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.711124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.751339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.794595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.838187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.964903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632304091034:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.965005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.965434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632304091039:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.965475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632304091040:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.965839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.969965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:53.984873Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678632304091043:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:54.062065Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678636599058390:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:55.051047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678619419186450:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.051135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.613593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:50:55.784111Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808255776, txId: 281474976710674] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 10412, MsgBus: 3034 2026-01-07T17:50:49.880350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678615273642925:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:49.881190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.114849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.125060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.125149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.222177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.246353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678615273642803:2081] 1767808249873019 != 1767808249873022 2026-01-07T17:50:50.249975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.315110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.370029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.370057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.370068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.370165Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.805141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:50.811135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:50.858253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:50.886984Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:50.999854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.161306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.229033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.041969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632453513873:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.042086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.042457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632453513883:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.042588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.373442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.405052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.443326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.476546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.519858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.553822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.583587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.628445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.716173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632453514751:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.716248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632453514756:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.716254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.716520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678632453514758:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.716576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.719866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:53.738042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678632453514759:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:53.849586Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678632453514811:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:54.878459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678615273642925:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:54.878518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.735953Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808255728, txId: 281474976715673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 8758, MsgBus: 8988 2026-01-07T17:50:50.137287Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678621864651934:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.137403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.411227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.470367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.470483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.487818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.549680Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.640280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.640306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.640312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.640382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.669002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:51.113762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.129713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:51.152256Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.191828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.344247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.508298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.573982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.399099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678634749555657:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.399206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.409103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678634749555667:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.409220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.735934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.771360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.801676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.847689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.882936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.918759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.959314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.030263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.111169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639044523836:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.111260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.111436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639044523841:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.111473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639044523842:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.111585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.116223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.130616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678639044523845:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:54.211227Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678639044523898:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:55.133169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678621864651934:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.133261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 13585, MsgBus: 9232 2026-01-07T17:50:50.415958Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678620486635853:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.418458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.631628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.633133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.633243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.659816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.697297Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.707685Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678620486635826:2081] 1767808250412520 != 1767808250412523 2026-01-07T17:50:50.817875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.817899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.817906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.818022Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.881017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:51.324807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.419915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.424103Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.580479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.741006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.810531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.611782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678633371539587:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.611892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.612216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678633371539597:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.612265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.903976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.937606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.971910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.008356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.039085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.082439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.120989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.199635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.272838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637666507760:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.272915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.273270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637666507765:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.273299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637666507766:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.273409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.279188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.287331Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678637666507769:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:54.352012Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678637666507820:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:55.414725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678620486635853:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.414790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.211753Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808256249, txId: 281474976710673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 31744, MsgBus: 15212 2026-01-07T17:50:51.116330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678624869876218:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.116382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:51.366857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:51.382203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:51.382359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:51.384559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:51.481977Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:51.573468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:51.573490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:51.573496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:51.573610Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:51.667317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:50:51.996345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:52.056885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.138203Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.207789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.356164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.457325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.098168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637754779940:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.098282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.099822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637754779950:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.099885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.421769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.459981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.500700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.538048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.590069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.628160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.683506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.733205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.808140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637754780817:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.808243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.808474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637754780822:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.808524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678637754780823:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.808636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.812045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.822816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678637754780826:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:54.925335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678637754780877:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:56.116984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678624869876218:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.117047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 12002, MsgBus: 28950 2026-01-07T17:50:50.404388Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678622593223349:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.408159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.420241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:50.709624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.714882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.714977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.770374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.856270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.865087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678622593223180:2081] 1767808250363816 != 1767808250363819 2026-01-07T17:50:50.924247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:51.007816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:51.007843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:51.007850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:50:51.007938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:51.411571Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.585680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.591980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:51.633969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.812050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.957680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.046494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.778646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678635478126956:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.778746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.779206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678635478126966:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.779276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.108865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.150540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.186503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.219906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.252135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.292101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.331259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.386299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.493174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639773095140:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.493272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.493594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639773095146:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.493627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639773095145:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.493701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.498562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.507674Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678639773095149:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:54.569882Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678639773095200:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:55.400714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678622593223349:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.400800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.366945Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808256403, txId: 281474976715673] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 14834, MsgBus: 64517 2026-01-07T17:50:50.596998Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678621893756730:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.602958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.789137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.859451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.859524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.863062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.924313Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.953037Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678621893756625:2081] 1767808250591899 != 1767808250591902 2026-01-07T17:50:51.025132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:51.025153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:51.025160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:51.025287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:51.053145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-07T17:50:51.472946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.483211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:51.524280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.612658Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.681300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.852238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.921087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.765807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678634778660382:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.765942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.766854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678634778660392:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.766911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.114099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.147159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.186471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.237980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.272908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.319812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.379094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.428467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.523352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639073628559:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.523441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.524115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639073628564:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.524156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639073628565:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.524198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.528582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.544708Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678639073628568:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:54.615555Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678639073628621:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:55.596524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678621893756730:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.596595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.403689Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808256397, txId: 281474976715673] shutting down |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 10328, MsgBus: 22480 2026-01-07T17:50:51.766665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678627743685539:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.768981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.024851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.043023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.043110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.057486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.144230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:52.212705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.244793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:52.244812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:52.244819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:52.244909Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:52.649041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:52.653906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:52.709016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.780397Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.847990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.997026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.055517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.789455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678640628589245:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.789604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.791192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678640628589255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.791247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.135096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.168509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.214794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.253262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.282097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.368278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.413301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.468959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.580298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678644923557431:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.580445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.580838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678644923557436:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.580873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678644923557437:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.580998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.584689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:55.597193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678644923557440:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:55.657769Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678644923557491:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:56.766927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678627743685539:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.767024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.261488Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808257252, txId: 281474976715673] shutting down |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestAlterFollower [GOOD] >> THiveTest::TestBootProgress >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 8293, MsgBus: 23116 2026-01-07T17:50:49.977866Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678618914405333:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:49.984902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.219301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.258625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.258745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.331724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.333963Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.359625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678618914405297:2081] 1767808249970100 != 1767808249970103 2026-01-07T17:50:50.441304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.441331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.441339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.441431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:50.492638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:51.014118Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.019630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.025541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:51.082251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.210747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.367641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:51.449125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.299516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678636094276352:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.299624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.300878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678636094276361:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.301010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:53.702530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.737710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.771666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.803780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.840271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.918235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.970656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.023489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.113186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678640389244530:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.113296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.113626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678640389244535:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.113680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678640389244536:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.113712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.118246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.131281Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678640389244539:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:54.221868Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678640389244590:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:54.978445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678618914405333:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:54.978515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId >> TColumnShardTestSchema::ForgetWithLostAnswer >> MoveTable::RenameAbsentTable_Negative >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::RebootExportWithLostAnswer >> MoveTable::RenameToItself_Negative >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> KqpSystemView::TopQueriesOrderByDesc [GOOD] >> MoveTable::WithData-Reboot >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId >> TColumnShardTestSchema::EnableColdTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 14791, MsgBus: 6065 2026-01-07T17:50:52.761025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678628654455298:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.762066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.983758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:53.037974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:53.038089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:53.039951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:53.113051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:53.121004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678628654455262:2081] 
1767808252759186 != 1767808252759189 2026-01-07T17:50:53.155961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:53.175597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:53.175625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:53.175653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:53.175753Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:53.679777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:53.685073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:53.756589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:53.767416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:53.956597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.106366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.167117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.936838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678641539359021:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.936978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.937370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678641539359031:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.937414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.224919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.258230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.288469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.323365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.356708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.387788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.418249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.462846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.546659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678645834327195:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.546720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.546833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678645834327200:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.548666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678645834327202:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.548743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.550680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:56.561351Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678645834327203:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:56.643925Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678645834327255:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:57.761032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678628654455298:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.761100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestBootProgress [GOOD] >> THiveTest::TestBridgeCreateTablet >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 31256, MsgBus: 22172 2026-01-07T17:50:50.635739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678622374942692:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.636215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.935966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.936089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.955028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:51.026780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:51.067397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:51.070988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678622374942562:2081] 1767808250621969 != 1767808250621972 2026-01-07T17:50:51.190793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:51.190815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:51.190825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:51.190948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:51.201685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:51.644871Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.670918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.683602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:51.702205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 1 2026-01-07T17:50:54.511044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639554812924:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.511145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639554812932:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.511211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.513119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678639554812939:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.513239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.515762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:54.530970Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678639554812938:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:50:54.672917Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678639554812991:2575] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-01-07T17:50:55.631430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678622374942692:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.631745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:58.986738Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808258981, txId: 281474976710673] shutting down |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 64543, MsgBus: 15328 2026-01-07T17:50:53.610652Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678632664124903:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:53.610796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:53.862767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:53.865030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:53.865144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:53.928049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:53.936139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:53.945034Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678632664124753:2081] 1767808253592794 != 1767808253592797 2026-01-07T17:50:54.084972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:54.085000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:54.085006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:54.085072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:54.154285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:54.531156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:54.539756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:54.605066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.611902Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:54.740021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.875569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.950800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.692412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678645549028513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.692558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.692879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678645549028522:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.692915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.022261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.053433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.085389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.117499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.149466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.224720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.264231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.340883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.437911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678649843996708:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.438013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.438226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678649843996713:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.438265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678649843996714:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.438399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.442508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:57.452789Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678649843996717:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:57.543884Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678649843996768:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:58.607561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678632664124903:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:58.607650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:59.208727Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808259203, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 18679, MsgBus: 3815 2026-01-07T17:50:53.299621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678633857141150:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:53.299674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:53.544229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:53.575893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:53.576022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:53.630927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:53.659886Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:53.660901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678633857141004:2081] 1767808253287797 != 1767808253287800 2026-01-07T17:50:53.750587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:53.767172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:53.767193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:53.767223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:53.767320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:54.195035Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:54.201277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:54.253383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.310488Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:54.392470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.556168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.622008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.515304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678646742044768:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.515443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.515744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678646742044778:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.515801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:56.843171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.880149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.914526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.966731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.001925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.038540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.078358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.152962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.226372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651037012942:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.226474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.226730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651037012947:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.226750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651037012948:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.226782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.230212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:57.240904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678651037012951:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:57.329099Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678651037013002:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:58.298381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678633857141150:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:58.298442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TColumnShardTestSchema::RebootExportAfterFail >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 21518, MsgBus: 26068 2026-01-07T17:50:50.078931Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678620978487055:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.080644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.197350Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678621765565130:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:50.197722Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:50.492260Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.492096Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.492328Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:50.551140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:50.600356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.600454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.601693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.601813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.601988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:50.602043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:50.607223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.660968Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:50:50.661031Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:50.661962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.671115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:50.735340Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:50.759066Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.763688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.764571Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:50.902403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:50.902436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:50.902444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:50.902548Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:51.110734Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.234966Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.248519Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:51.696559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:51.779121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.042329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.354617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:52.567283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.815018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678638158358517:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.815126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.815377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678638158358526:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:54.815421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.072944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678620978487055:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.073011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.172316Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678621765565130:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:55.172390Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:55.199301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.259403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.325836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.408313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.457189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.514159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.591025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:50:55.681875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.813282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678642453326931:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.813361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.813592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678642453326936:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.813607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678642453326937:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.813696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:55.817901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:55.857238Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678642453326940:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725670 completed, doublechecking } 2026-01-07T17:50:55.964881Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678642453327026:4760] txid# 281474976725671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> MoveTable::RenameToItself_Negative [GOOD] >> MoveTable::WithData-Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 1534, MsgBus: 20581 2026-01-07T17:50:54.323550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678639327983889:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:54.323639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:54.603609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:54.603774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:54.603880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:54.606821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:54.699834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:54.700502Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678639327983846:2081] 1767808254321864 != 1767808254321867 2026-01-07T17:50:54.778399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:54.778418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:54.778426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:54.778506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:54.888314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:55.262801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:55.268940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:55.320068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.383620Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:55.463480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.648462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.732638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.494277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678652212887605:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.494409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.494804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678652212887615:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.494867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.863253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.898958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.928646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.959203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.992689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.028754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.068096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.136404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.200629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656507855779:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.200692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.200745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656507855784:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.200841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656507855786:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.200867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.203819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:58.213968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678656507855788:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:58.286992Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678656507855839:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:59.323812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678639327983889:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:59.323896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:51:00.561928Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808260589, txId: 281474976710673] shutting down |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestFollowers >> TColumnShardTestSchema::HotTiersTtl >> KqpSystemView::NodesRange1 [GOOD] >> KqpSystemView::QueryStatsScan [FAIL] >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] Test command err: 2026-01-07T17:51:01.025141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.059680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.059945Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.077082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.077399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.077709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.077860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.077963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.078082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.078446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.078550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.078646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.078777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.078902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.110261Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.110829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.110878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.111047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.111261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.111294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.111370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111422Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.111450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.111473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.111606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.111699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.111737Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.111825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.111906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.111924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.111956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.111988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.112014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.112043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.112069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.112319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.112351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.112437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.112464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.112482Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.112534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.112564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.112591Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.112625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.112650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:01.112669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:01.112790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:01.112837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
mon_data.cpp:29;EXECUTE:storages_managerLoadingTime=317; 2026-01-07T17:51:01.437991Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2026-01-07T17:51:01.438021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2026-01-07T17:51:01.438047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-07T17:51:01.438102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2026-01-07T17:51:01.438162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:51:01.438241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-07T17:51:01.438273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-07T17:51:01.438338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2026-01-07T17:51:01.438397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=20; 2026-01-07T17:51:01.438464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=30; 2026-01-07T17:51:01.438505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2547; 2026-01-07T17:51:01.438611Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:01.438665Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:01.439001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:01.439291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:01.439343Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:01.439411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.190000s; 2026-01-07T17:51:01.439674Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:01.439767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:01.439835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:01.439882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:01.440017Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.190000s; 2026-01-07T17:51:01.440066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:51:01.736622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136948233198112;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.736721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136948233198112;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:01.750388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136948233198112;op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137154394350336;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:01.750538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136948233198112;op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137154394350336;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2026-01-07T17:51:01.750622Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136948233198112;op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261887;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137154394350336;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-07T17:51:01.750989Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:01.751153Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808261887 at tablet 9437184, mediator 0 2026-01-07T17:51:01.751205Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-07T17:51:01.751548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.751660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.751722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.751818Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:01.764735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767808261887;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-07T17:51:01.764830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:51:01.764963Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-07T17:51:01.765022Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-07T17:51:01.765274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:01.782486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-07T17:51:01.807061Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-07T17:51:01.808091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136948233247840;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=1; 2026-01-07T17:51:01.808170Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136948233247840;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Rename to existing table; 2026-01-07T17:51:01.820542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136948233247840;op_tx=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-07T17:51:01.820638Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136948233247840;op_tx=11:TX_KIND_SCHEMA;min=1767808261890;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=Rename to existing table;tablet_id=9437184;tx_id=11; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] Test command err: 2026-01-07T17:51:01.020660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.055780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.055955Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.077193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.077489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.077834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.078001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.078112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.078237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.078646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.078746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.078861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.078970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.079074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.110963Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.111174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.111228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.111395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.111597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.111641Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.111751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111834Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.111879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.111908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.112094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.112167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.112209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.112236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.112352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.112411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.112474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.112509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.112563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.112598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.112632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.112674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.112708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.113039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.113072Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.113216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.113261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.113306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.113378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.113428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.113471Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.113518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.113554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:01.113583Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:01.113710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:01.113757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-07T17:51:01.458026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:51:01.458110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-01-07T17:51:01.458164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-07T17:51:01.458252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2026-01-07T17:51:01.458329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-07T17:51:01.458475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=73; 2026-01-07T17:51:01.458546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=26; 2026-01-07T17:51:01.458598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=21; 2026-01-07T17:51:01.458631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3179; 2026-01-07T17:51:01.458813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:01.458883Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:01.458965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:01.459303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:01.459380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:01.459468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.080000s; 2026-01-07T17:51:01.459784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:01.459872Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:01.459946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:01.459996Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:01.460136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.155000s; 2026-01-07T17:51:01.460207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:51:01.755066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137078026983744;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.755146Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137078026983744;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:01.767545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137078026983744;op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137284188094912;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:01.767620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137078026983744;op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137284188094912;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2179]; 2026-01-07T17:51:01.767675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137078026983744;op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808261886;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137284188094912;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-07T17:51:01.767969Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:01.768114Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808261886 at tablet 9437184, mediator 0 2026-01-07T17:51:01.768160Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-07T17:51:01.768449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.768542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.768579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:01.768658Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:01.777063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767808261886;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-07T17:51:01.777168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:51:01.777291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-07T17:51:01.777345Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-07T17:51:01.777606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:01.793890Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-07T17:51:01.818132Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-07T17:51:01.819158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137078027033696;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=111;dst=2; 2026-01-07T17:51:01.819236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137078027033696;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=111;result=not_found; 2026-01-07T17:51:01.819289Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137078027033696;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=No such table; 2026-01-07T17:51:01.831967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;this=137078027033696;op_tx=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:104:2137]; 2026-01-07T17:51:01.832062Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;this=137078027033696;op_tx=11:TX_KIND_SCHEMA;min=1767808261889;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=No such table;tablet_id=9437184;tx_id=11; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] >> TColumnShardTestSchema::RebootColdTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData-Reboot [GOOD] Test command err: 2026-01-07T17:51:01.025175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.054872Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.055791Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.077652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.078006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.078304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.078449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.078589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.078716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.079106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.079225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.079343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.079452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.079561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.115487Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.115706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.115800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.115993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.116218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.116249Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.116313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.116379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.116397Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.116540Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.116630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.116659Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.116726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.116815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.116844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.116882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.116914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:51:01.116948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.116976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.117000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.117016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.117167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.117198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.117217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.117311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.117339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.117362Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.117410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.117447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.117480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.117509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.117533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:01.117551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-07T17:51:01.117630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:01.117656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:51:02.158271Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:51:02.158523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.158729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.158904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.159229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:02.159441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.159646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.159964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:335:2347] finished for tablet 9437184 2026-01-07T17:51:02.160629Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:329:2341];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.012},{"events":["l_bootstrap","f_processing"],"t":0.013},{"events":["f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":0.035},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.043}],"full":{"a":1906355,"name":"_full_task","f":1906355,"d_finished":0,"c":0,"l":1950123,"d":43768},"events":[{"name":"bootstrap","f":1906670,"d_finished":13288,"c":1,"l":1919958,"d":13288},{"a":1949293,"name":"ack","f":1942333,"d_finished":6726,"c":1,"l":1949059,"d":7556},{"a":1949277,"name":"processing","f":1920342,"d_finished":10770,"c":3,"l":1949063,"d":11616},{"name":"ProduceResults","f":1918769,"d_finished":3482,"c":6,"l":1949769,"d":3482},{"a":1949776,"name":"Finish","f":1949776,"d_finished":0,"c":0,"l":1950123,"d":347},{"name":"task_result","f":1920364,"d_finished":3969,"c":2,"l":1942204,"d":3969}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.160744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:02.161236Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:329:2341];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.012},{"events":["l_bootstrap","f_processing"],"t":0.013},{"events":["f_task_result"],"t":0.014},{"events":["f_ack","l_task_result"],"t":0.035},{"events":["l_ProduceResults","f_Finish"],"t":0.043},{"events":["l_ack","l_processing","l_Finish"],"t":0.044}],"full":{"a":1906355,"name":"_full_task","f":1906355,"d_finished":0,"c":0,"l":1950889,"d":44534},"events":[{"name":"bootstrap","f":1906670,"d_finished":13288,"c":1,"l":1919958,"d":13288},{"a":1949293,"name":"ack","f":1942333,"d_finished":6726,"c":1,"l":1949059,"d":8322},{"a":1949277,"name":"processing","f":1920342,"d_finished":10770,"c":3,"l":1949063,"d":12382},{"name":"ProduceResults","f":1918769,"d_finished":3482,"c":6,"l":1949769,"d":3482},{"a":1949776,"name":"Finish","f":1949776,"d_finished":0,"c":0,"l":1950889,"d":1113},{"name":"task_result","f":1920364,"d_finished":3969,"c":2,"l":1942204,"d":3969}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:02.161335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:02.057576Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:51:02.161381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:02.161552Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-07T17:51:02.162541Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-07T17:51:02.162947Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1767808261895:12} readable: {1767808261895:max} at tablet 9437184 2026-01-07T17:51:02.163104Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-07T17:51:02.163165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767808261895:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:02.163281Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767808261895:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpSystemView::QueryStatsSimple [FAIL] >> TColumnShardTestSchema::RebootHotTiers >> THiveTest::TestBridgeCreateTablet [GOOD] >> THiveTest::TestBridgeDisconnect >> TColumnShardTestSchema::HotTiers >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 9917, MsgBus: 12109 2026-01-07T17:50:52.197975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678630311224172:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.199369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.235713Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678630723745389:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.236707Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.248197Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678629602852397:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.253988Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.270038Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678629841390293:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.270147Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.279253Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678630434273342:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:52.281259Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.535770Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.537016Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.537011Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.540587Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.541007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.660871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.661247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.664260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.664531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.664732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.664775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.667372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.667438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.667611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.667666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.707137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.716435Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:50:52.716474Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, 
NodeId 2 Cookie 2 2026-01-07T17:50:52.741142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.741364Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:50:52.741395Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:50:52.741462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.765926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.767116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.895613Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:52.905062Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.907114Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.911769Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.919276Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.921115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:53.193629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:53.193656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:53.193663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:53.193759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:53.195746Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:53.250251Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:53.270914Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:53.285555Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:53.355512Z node 5 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:54.056314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:54.203268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.665284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.059145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.159775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.230169Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678630723745389:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.230245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.245781Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678629602852397:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.245858Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.251936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678630311224172:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.252011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.261903Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678629841390293:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.261969Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.264591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651786062708:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.264726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.265072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651786062718:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.265166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.279328Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592678630434273342:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:57.279423Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.600319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.665975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.735253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.789824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.886762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.943446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.035302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.108974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.197493Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656081030998:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.197585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.197659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656081031003:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.197829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678656081031005:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.197889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.201120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:58.220122Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678656081031006:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:50:58.299828Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678656081031081:4581] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:59.941312Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808259936, txId: 281474976710673] shutting down |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TColumnShardTestSchema::OneColdTier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 12465, MsgBus: 6843 2026-01-07T17:50:51.694441Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678625866980474:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.694528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:51.743599Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678625735148000:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.744244Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:51.750751Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678627379159653:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.752392Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:51.783761Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678623861242846:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.783802Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:51.793590Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:51.851161Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592678627929083621:2231];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:51.949721Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:52.185926Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.201510Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.208392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.231950Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.238190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.365455Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.408413Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.433274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.459619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.459747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.465719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.465813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.466648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.473146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.473489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.473528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.559114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:52.559213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:52.573241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.577262Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-07T17:50:52.577311Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:50:52.577338Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:50:52.606089Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:52.626921Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:52.646748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.648601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.693980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.698992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:52.704156Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.731835Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.768716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:52.761053Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.773030Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.780519Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.821461Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.830537Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:52.932578Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:52.955781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:53.155322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:53.155343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:53.155356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:53.155481Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:53.205813Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:54.174716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:54.313345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:54.791975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.033962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:55.178902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:56.701073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678625866980474:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.701133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.737032Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678625735148000:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.737091Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.757033Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678627379159653:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.757130Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.784045Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678623861242846:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.784110Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:56.849611Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592678627929083621:2231];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:56.849690Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:57.468365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651636786224:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.468486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.468811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678651636786234:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.468859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:57.821406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.890514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.937983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:57.993625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.118466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.177801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.235716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.296369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.432506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678655931754493:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.432595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.432657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678655931754498:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.432820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678655931754501:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.432867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:58.436425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976735670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:58.456974Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678655931754502:2401], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976735670 completed, doublechecking } 2026-01-07T17:50:58.560618Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678655931754573:4585] txid# 281474976735671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:01.020721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.059252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.059522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.077067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.077378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.077662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.077793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.077902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.078014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.078410Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.078531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.078666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.078769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.078890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.116007Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.116699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.116765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.116990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.117174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.117257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.117304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.117425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.117512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.117558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.117592Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.117782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.117869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.117918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.117955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.118072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.118169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.118224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.118256Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.118327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.118377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.118414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.118461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.118510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.118542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.118830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.118901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.118941Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.119094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.119145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.119177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.119238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.119284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.119322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.119384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.119439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:01.119480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:01.119651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:01.119699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
7184;request_tx=104:TX_KIND_SCHEMA;min=1767808262011;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136866642971680;op_tx=104:TX_KIND_SCHEMA;min=1767808262011;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1767808262011;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137072783226048;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-07T17:51:04.034100Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:04.034233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808262011 at tablet 9437184, mediator 0 2026-01-07T17:51:04.034295Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-07T17:51:04.034580Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-07T17:51:04.046983Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-07T17:51:04.047653Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808262011:max} readable: {1767808262011:max} at tablet 9437184 2026-01-07T17:51:04.047820Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:04.052214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262011:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:04.052310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262011:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:04.052900Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262011:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:04.054589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262011:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:04.105598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262011:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2026-01-07T17:51:04.107065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:04.107379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:04.107802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.107986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.108301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:04.108546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.108758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.109011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2026-01-07T17:51:04.109524Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":4157340,"name":"_full_task","f":4157340,"d_finished":0,"c":0,"l":4160905,"d":3565},"events":[{"name":"bootstrap","f":4157681,"d_finished":2171,"c":1,"l":4159852,"d":2171},{"a":4160084,"name":"ack","f":4160084,"d_finished":0,"c":0,"l":4160905,"d":821},{"a":4160061,"name":"processing","f":4160061,"d_finished":0,"c":0,"l":4160905,"d":844},{"name":"ProduceResults","f":4159454,"d_finished":795,"c":2,"l":4160629,"d":795},{"a":4160635,"name":"Finish","f":4160635,"d_finished":0,"c":0,"l":4160905,"d":270}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.109621Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:04.110039Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":4157340,"name":"_full_task","f":4157340,"d_finished":0,"c":0,"l":4161498,"d":4158},"events":[{"name":"bootstrap","f":4157681,"d_finished":2171,"c":1,"l":4159852,"d":2171},{"a":4160084,"name":"ack","f":4160084,"d_finished":0,"c":0,"l":4161498,"d":1414},{"a":4160061,"name":"processing","f":4160061,"d_finished":0,"c":0,"l":4161498,"d":1437},{"name":"ProduceResults","f":4159454,"d_finished":795,"c":2,"l":4160629,"d":795},{"a":4160635,"name":"Finish","f":4160635,"d_finished":0,"c":0,"l":4161498,"d":863}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.110143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:04.054523Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:04.110210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:04.110377Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:01.020510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.055774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.056029Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.076993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.077293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.077572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.077703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.077810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.077922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.078283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.078409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.078527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.078630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.078737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.110279Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.110845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.110897Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.111046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.111254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.111287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.111366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.111456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.111478Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.111615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.111705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.111724Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.111793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.111840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.111882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.111906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.111940Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.112002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.112024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.112055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.112080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.112290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.112323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.112345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.112448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.112489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.112516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.112550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.112578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.112603Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.112640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.112665Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:01.112697Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:01.112790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:01.112837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:04.389660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:51:04.389704Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:04.390180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.019000s; 2026-01-07T17:51:04.390222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:51:04.483941Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808262066:max} readable: {1767808262066:max} at tablet 9437184 2026-01-07T17:51:04.484166Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:04.489207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262066:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:04.489374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262066:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:04.490303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262066:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:04.492389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262066:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:04.561783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808262066:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2026-01-07T17:51:04.563435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:04.563789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:04.564248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.564467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.564945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:04.565160Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.565362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.565626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2026-01-07T17:51:04.566201Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":4269812,"name":"_full_task","f":4269812,"d_finished":0,"c":0,"l":4273809,"d":3997},"events":[{"name":"bootstrap","f":4270254,"d_finished":2371,"c":1,"l":4272625,"d":2371},{"a":4272994,"name":"ack","f":4272994,"d_finished":0,"c":0,"l":4273809,"d":815},{"a":4272971,"name":"processing","f":4272971,"d_finished":0,"c":0,"l":4273809,"d":838},{"name":"ProduceResults","f":4272177,"d_finished":846,"c":2,"l":4273512,"d":846},{"a":4273518,"name":"Finish","f":4273518,"d_finished":0,"c":0,"l":4273809,"d":291}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.566312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:04.566797Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":4269812,"name":"_full_task","f":4269812,"d_finished":0,"c":0,"l":4274484,"d":4672},"events":[{"name":"bootstrap","f":4270254,"d_finished":2371,"c":1,"l":4272625,"d":2371},{"a":4272994,"name":"ack","f":4272994,"d_finished":0,"c":0,"l":4274484,"d":1490},{"a":4272971,"name":"processing","f":4272971,"d_finished":0,"c":0,"l":4274484,"d":1513},{"name":"ProduceResults","f":4272177,"d_finished":846,"c":2,"l":4273512,"d":846},{"a":4273518,"name":"Finish","f":4273518,"d_finished":0,"c":0,"l":4274484,"d":966}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:04.566952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:04.492329Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:04.567004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:04.567147Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:03.254747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:03.280274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:03.280534Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:03.287541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:03.287752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:03.287928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:03.288024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:03.288091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:03.288154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:03.288239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:03.288318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:03.288404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:03.288490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.288559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:03.288644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:03.288742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:03.315961Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:03.316396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:03.316456Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:03.316659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:03.316836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:03.316960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:03.317007Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:03.317109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:03.317170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:03.317214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:03.317244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:03.317423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:03.317511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:03.317561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:03.317595Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:03.317682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:03.317737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:03.317793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:03.317848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:03.317912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:03.317956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:03.317989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:03.318053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:03.318105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:03.318143Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:03.318429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:03.318495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:03.318585Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:03.318748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:03.318804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.318841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.318898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:03.318937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:03.318968Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:51:03.319012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:03.319050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:03.319080Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:03.319241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:03.319288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 19:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:04.728401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136784329393344;op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136990490948800;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:04.728601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136784329393344;op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136990490948800;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:04.728671Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136784329393344;op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808264333;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136990490948800;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-07T17:51:04.729095Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:04.729283Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808264333 at tablet 9437184, mediator 0 2026-01-07T17:51:04.729351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-07T17:51:04.729718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:04.729795Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:04.729886Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:04.729967Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-07T17:51:04.730047Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-07T17:51:04.730362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:04.731097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-07T17:51:04.743501Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:04.745305Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136784329396256;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767808264336;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:04.757670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808264336;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136784329396256;op_tx=120:TX_KIND_SCHEMA;min=1767808264336;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:04.757753Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808264336;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136784329396256;op_tx=120:TX_KIND_SCHEMA;min=1767808264336;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:04.759244Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136784329398048;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767808264337;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:04.771745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808264337;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136784329398048;op_tx=121:TX_KIND_SCHEMA;min=1767808264337;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:04.771844Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808264337;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136784329398048;op_tx=121:TX_KIND_SCHEMA;min=1767808264337;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:04.773376Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136784329399840;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767808264339;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:04.785434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808264339;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136784329399840;op_tx=122:TX_KIND_SCHEMA;min=1767808264339;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:04.785516Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808264339;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136784329399840;op_tx=122:TX_KIND_SCHEMA;min=1767808264339;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 1672, MsgBus: 6849 2026-01-07T17:50:57.704046Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678653256407390:2068];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T17:50:57.704106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:57.732523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:57.963297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:57.965151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:57.965265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:57.974588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:58.057080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678653256407362:2081] 1767808257702056 != 1767808257702059 2026-01-07T17:50:58.057313Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:58.067838Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:50:58.115206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:58.115224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:58.115229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:58.115305Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:58.235395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:58.513820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:58.567290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.675211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.718538Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:58.822943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:58.887932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:00.771260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678666141311124:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:00.771407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:00.772012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678666141311134:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:00.772077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.179890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.209863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.239453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.268744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.298031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.331049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.390745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.437473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:01.509412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678670436279303:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.509502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.509641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678670436279308:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.509934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678670436279310:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.509974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:01.513348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:01.528776Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678670436279311:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:51:01.609130Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678670436279363:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:51:02.704858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678653256407390:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:02.704958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestBridgeDisconnect [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:04.358394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:04.384502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:04.384746Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:04.391124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:04.391350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:04.391556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:04.391688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:04.391796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:04.391904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:04.392033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:04.392147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:04.392237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:04.392375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.392482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:04.392590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:04.392691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:04.419676Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:04.420095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:04.420172Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:04.420365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.420517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:04.420608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:04.420651Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:04.420755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:04.420821Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:04.420869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:04.420901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:04.421083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.421146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:04.421201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:04.421265Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:04.421365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:04.421427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:04.421472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:04.421502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:04.421570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:04.421623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:04.421658Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:04.421716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:04.421767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:04.421801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:04.422013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:04.422064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:04.422147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:04.422312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:04.422367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.422408Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.422459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:04.422497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:04.422532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:04.422575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:04.422612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:04.422646Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:04.422825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:04.422884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:05.928969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136905064531136;op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137111226097472;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:05.929046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136905064531136;op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137111226097472;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:05.929119Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136905064531136;op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808265443;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137111226097472;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-07T17:51:05.929392Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:05.929511Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808265443 at tablet 9437184, mediator 0 2026-01-07T17:51:05.929554Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-07T17:51:05.929773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:05.929804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:05.929864Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:05.929922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-07T17:51:05.929959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-07T17:51:05.930155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:05.930628Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-07T17:51:05.942686Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:05.944361Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136905064534048;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767808265446;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:05.956641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808265446;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136905064534048;op_tx=120:TX_KIND_SCHEMA;min=1767808265446;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:05.956735Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808265446;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136905064534048;op_tx=120:TX_KIND_SCHEMA;min=1767808265446;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 
4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:05.958066Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136905064535840;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767808265447;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:05.970062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808265447;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136905064535840;op_tx=121:TX_KIND_SCHEMA;min=1767808265447;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:05.970146Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808265447;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136905064535840;op_tx=121:TX_KIND_SCHEMA;min=1767808265447;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:05.971564Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136905064537632;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767808265449;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:05.983555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808265449;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136905064537632;op_tx=122:TX_KIND_SCHEMA;min=1767808265449;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:05.983630Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808265449;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136905064537632;op_tx=122:TX_KIND_SCHEMA;min=1767808265449;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [FAIL] Test command err: Trying to start YDB, gRPC: 16037, MsgBus: 23542 2026-01-07T17:50:38.864251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678569873862931:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.864290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.973541Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678570483497425:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.973692Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:39.008948Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:39.385070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.393072Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-07T17:50:39.397082Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:50:39.387847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.641098Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.659159Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.677346Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.816264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.816396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.821028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.821107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.822503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.822566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.850439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.922551Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:50:39.922604Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:50:39.982696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.982926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:40.003618Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.134364Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.131642Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.195587Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:40.197104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:40.197210Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:40.208494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:40.551587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:40.551625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:40.551633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:40.551729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:41.704688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:41.816756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.131708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.479238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.717848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.864658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678569873862931:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.864729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.973173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678570483497425:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.973222Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:44.669512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678595643668997:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.669634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.670059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678595643669007:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:44.670139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.077168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.130029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.197784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.248899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.306817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.367942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.431866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.528911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:45.622374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678599938637400:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.622482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.622948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678599938637406:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.623016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678599938637404:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.623144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:45.626910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:45.654346Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678599938637409:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:45.732327Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678599938637485:4737] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:47.808431Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808247801, txId: 281474976715675] shutting down 2026-01-07T17:50:54.686988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:54.687024Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:904, auto NKikimr::NKqp::NTestSuiteKqpSystemView::TTestCaseQueryStatsSimple::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TStringBuf) const: (readBytesSet.contains(79)) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B6C4F0B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BBC19CB 2. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:904: operator() @ 0x1B2C6347 3. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:908: ?? @ 0x1B2C05B4 4. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: operator() @ 0x1B2E3097 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B2E3097 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B2E3097 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BBFA639 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BBFA639 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BBFA639 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BBC86A7 13. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: Execute @ 0x1B2E23EC 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BBC9E5F 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BBF449C 16. ??:0: ?? @ 0x7F16471F1D8F 17. ??:0: ?? @ 0x7F16471F1E3F 18. ??:0: ?? 
@ 0x189BB028 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [FAIL] Test command err: Trying to start YDB, gRPC: 3869, MsgBus: 32377 2026-01-07T17:50:38.506171Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678568006458225:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.506225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.535346Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:50:38.933119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:39.029093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:39.029231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.102970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.145044Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678568006457988:2081] 1767808238483024 != 1767808238483027 2026-01-07T17:50:39.157456Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.194276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.321590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.321622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.321636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.321720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.496143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.101439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.129272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.209135Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.478411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.738866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:40.827524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:42.781492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585186329046:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.781605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.781920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585186329056:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.782083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.113695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.148290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.188762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.258780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.292039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.340316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.378743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.433916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:43.501910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678568006458225:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:43.501971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:43.506707Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589481297222:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.506784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.506977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589481297227:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.507011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678589481297228:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.507090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:43.510546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:43.523347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678589481297231:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:50:43.623524Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678589481297283:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:46.553007Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808245693, txId: 281474976715673] shutting down 2026-01-07T17:50:46.691980Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808246682, txId: 281474976715676] shutting down 2026-01-07T17:50:53.820714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:53.820754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:956, auto NKikimr::NKqp::NTestSuiteKqpSystemView::TTestCaseQueryStatsScan::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TStringBuf) const: (readBytesSet.contains(192)) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B6C4F0B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BBC19CB 2. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:956: operator() @ 0x1B2CC571 3. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:959: Execute_ @ 0x1B2CC571 4. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: operator() @ 0x1B2E3097 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B2E3097 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B2E3097 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BBFA639 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BBFA639 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BBFA639 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BBC86A7 13. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: Execute @ 0x1B2E23EC 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BBC9E5F 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BBF449C 16. ??:0: ?? @ 0x7FCC809F4D8F 17. ??:0: ?? @ 0x7FCC809F4E3F 18. ??:0: ?? 
@ 0x189BB028 >> TColumnShardTestSchema::RebootOneColdTier >> TColumnShardTestSchema::RebootForgetAfterFail |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> THiveTest::TestStopTenant [GOOD] >> THiveTest::TestTabletAvailability >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] >> THiveTest::TestTabletAvailability [GOOD] >> THiveTest::TestSetDomain >> MoveTable::EmptyTable >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 19091, MsgBus: 6603 2026-01-07T17:50:38.489992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678568765324610:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:38.490037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:38.877179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:38.883111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:38.883219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:39.002455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:39.007512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:39.205018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:39.282622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:39.282652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:39.282658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:39.282717Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:39.477390Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:40.350884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:40.361434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:50:40.883821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:40.883864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:40.883939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:40.883955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:41.884781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:41.884820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:41.884858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:41.884867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:42.362194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585945194651:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.362282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.362566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678585945194661:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.362605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:42.885226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:42.885259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:50:42.885299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:42.885323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:50:43.057501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:7592678590240161977:2511], Recipient [1:7592678568765324928:2145]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:50:43.057535Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:50:43.057554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T17:50:43.057589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:7592678590240161973:2508], Recipient [1:7592678568765324928:2145]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-07T17:50:43.057615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T17:50:43.172567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T17:50:43.173116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-07T17:50:43.173252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2026-01-07T17:50:43.173905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-07T17:50:43.173961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-07T17:50:43.173981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710658:0 type: TxCreateTable target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-07T17:50:43.174012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:50:43.174171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-07T17:50:43.174189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710658:0 1 -> 2 2026-01-07T17:50:43.174927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_create_table.cpp:766: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 38] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2026-01-07T17:50:43.175060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:43.175101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__o ... 
ardId 72075186224037888, followerId 0 2026-01-07T17:51:03.380280Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-01-07T17:51:03.380312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-07T17:51:03.380334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 0 row count 0 2026-01-07T17:51:03.380342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-07T17:51:03.380345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2026-01-07T17:51:03.380361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:393: Do not want to split tablet 72075186224037888 by the CPU load from the follower ID 2, reason: SplitByLoadNotEnabledForTable 2026-01-07T17:51:03.380401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T17:51:03.380488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:03.380504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:03.380511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-07T17:51:03.380619Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0.001516 TabletId: 72075186224037888 NodeId: 1 StartTime: 1767808243184 AccessTime: 1767808244098 UpdateTime: 1767808243860 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:51:03.380725Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0.000123 TabletId: 72075186224037888 NodeId: 1 StartTime: 1767808243274 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 2026-01-07T17:51:03.905405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:03.905445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:03.905491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:03.905507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:04.905900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:04.905940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:04.905983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:04.905998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:05.000521Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7592678568765324567:2070], interval end# 2026-01-07T17:51:05.000000Z, event interval end# 2026-01-07T17:51:05.000000Z 2026-01-07T17:51:05.000564Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7592678568765324567:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-01-07T17:51:05.001015Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7592678568765324684:2143], interval end# 2026-01-07T17:51:05.000000Z, event interval end# 2026-01-07T17:51:05.000000Z 2026-01-07T17:51:05.001057Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7592678568765324684:2143], query logs count# 1, processor ids count# 1, processor id to database count# 0 2026-01-07T17:51:05.906327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:05.906373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:05.906426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:05.906443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 
2026-01-07T17:51:06.049294Z node 1 :SYSTEM_VIEWS INFO: sysview_service.cpp:888: Navigate by database succeeded: service id# [1:7592678568765324684:2143], database# /Root, no sysview processor ... SELECT from partition_stats for /Root/Followers , attempt 2 2026-01-07T17:51:06.704724Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7592678689024410305:2475], owner: [1:7592678689024410303:2473], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-07T17:51:06.705722Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7592678689024410305:2475], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-07T17:51:06.705955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7592678689024410305:2475], Recipient [1:7592678568765324928:2145]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-07T17:51:06.705980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-07T17:51:06.706224Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7592678689024410305:2475], row count: 2, finished: 1 2026-01-07T17:51:06.706291Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7592678689024410305:2475], owner: [1:7592678689024410303:2473], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-07T17:51:06.707579Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7592678568765324684:2143], database# /Root, query hash# 3266603936201095014, cpu time# 263418 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/Followers' ... 
SELECT from partition_stats, attempt 0 2026-01-07T17:51:06.906721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:06.906762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:06.906812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7592678568765324928:2145], Recipient [1:7592678568765324928:2145]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:06.906825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:07.094646Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7592678693319377617:2483], owner: [1:7592678693319377615:2481], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-07T17:51:07.095184Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7592678693319377617:2483], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-07T17:51:07.095418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7592678693319377617:2483], Recipient [1:7592678568765324928:2145]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-07T17:51:07.095452Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-07T17:51:07.095600Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7592678693319377617:2483], row count: 2, finished: 1 2026-01-07T17:51:07.095677Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7592678693319377617:2483], owner: [1:7592678693319377615:2481], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-07T17:51:07.098987Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7592678568765324684:2143], database# /Root, query hash# 18339066598126957035, cpu time# 366538 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootHotTiersTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:05.394436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:05.419431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:05.419652Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:05.424992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:05.425173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:05.425382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:05.425476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:05.425543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:05.425614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:05.425702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:05.425770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:05.425854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:05.425929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.426003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:05.426069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:05.426154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:05.449251Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:05.449979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:05.450038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:05.450254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.450438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:05.450519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:05.450589Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:05.450707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:05.450772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:05.450814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:05.450842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:05.451007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.451077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:05.451118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:05.451147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:05.451231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:05.451294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:05.451345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:05.451373Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:05.451434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:05.451469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:05.451494Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:05.451538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:05.451588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:05.451615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:05.451820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:05.451918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:05.451954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:05.452103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:05.452143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.452170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.452232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:05.452270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:05.452309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:51:05.452362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:05.452411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:05.452441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:05.452628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:05.452682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... nd=cleanup;skip_reason=no_changes; 2026-01-07T17:51:08.215272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:51:08.215328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:08.215792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.098000s; 2026-01-07T17:51:08.215844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:51:08.317376Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808266549:max} readable: {1767808266549:max} at tablet 9437184 2026-01-07T17:51:08.317564Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:08.321567Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808266549:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:08.321686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808266549:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:08.322419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808266549:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:08.324226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808266549:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:08.368608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808266549:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2026-01-07T17:51:08.369910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:08.370165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:08.370527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.370706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.371084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:08.371247Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.371402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.371585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2026-01-07T17:51:08.372024Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3453283,"name":"_full_task","f":3453283,"d_finished":0,"c":0,"l":3456388,"d":3105},"events":[{"name":"bootstrap","f":3453650,"d_finished":1842,"c":1,"l":3455492,"d":1842},{"a":3455795,"name":"ack","f":3455795,"d_finished":0,"c":0,"l":3456388,"d":593},{"a":3455776,"name":"processing","f":3455776,"d_finished":0,"c":0,"l":3456388,"d":612},{"name":"ProduceResults","f":3455104,"d_finished":699,"c":2,"l":3456187,"d":699},{"a":3456191,"name":"Finish","f":3456191,"d_finished":0,"c":0,"l":3456388,"d":197}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.372131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:08.372436Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3453283,"name":"_full_task","f":3453283,"d_finished":0,"c":0,"l":3456932,"d":3649},"events":[{"name":"bootstrap","f":3453650,"d_finished":1842,"c":1,"l":3455492,"d":1842},{"a":3455795,"name":"ack","f":3455795,"d_finished":0,"c":0,"l":3456932,"d":1137},{"a":3455776,"name":"processing","f":3455776,"d_finished":0,"c":0,"l":3456932,"d":1156},{"name":"ProduceResults","f":3455104,"d_finished":699,"c":2,"l":3456187,"d":699},{"a":3456191,"name":"Finish","f":3456191,"d_finished":0,"c":0,"l":3456932,"d":741}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:08.372500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:08.324192Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:08.372530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:08.372625Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestSetDomain [GOOD] >> THiveTest::TestSetDomainAlready >> MoveTable::EmptyTable [GOOD] >> THiveTest::TestSetDomainAlready [GOOD] >> THiveTest::TestSetDomainError >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::EmptyTable [GOOD] Test command err: 2026-01-07T17:51:09.063022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:09.094086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:09.094339Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:09.101501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:09.101719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:09.101919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:09.102043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:09.102153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:09.102271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:09.102363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:09.102479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:09.102627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:09.102764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.102876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:09.102996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:09.103140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:09.131810Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:09.132370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:09.132427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:09.132617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.132772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:09.132856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:09.132893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:09.133031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:09.133091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:09.133131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:09.133158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:09.133323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.133397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:09.133446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:51:09.133478Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:09.133591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:09.133652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:09.133709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:09.133737Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:09.133783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:09.133816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:09.133842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:09.133887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:09.133926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:09.133954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:09.134166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:09.134287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:09.134320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:09.134438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:09.134477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.134502Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.134578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:09.134623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:09.134657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:09.134709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:09.134764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:09.134803Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:09.135036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:09.135104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-07T17:51:09.416160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=24; 2026-01-07T17:51:09.416224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=24; 2026-01-07T17:51:09.416265Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3132; 2026-01-07T17:51:09.416427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:09.416484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:09.416566Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:09.416861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:09.416915Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:09.417014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.171000s; 2026-01-07T17:51:09.417295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:09.417439Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:09.417492Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:09.417562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-07T17:51:09.417720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.057000s; 2026-01-07T17:51:09.417775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-07T17:51:10.026874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137002665826624;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:10.026957Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137002665826624;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:10.039623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137002665826624;op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137208826979072;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:10.039754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137002665826624;op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137208826979072;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2026-01-07T17:51:10.039876Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137002665826624;op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767808270033;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137208826979072;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-07T17:51:10.040251Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:10.040432Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808270033 at tablet 9437184, mediator 0 2026-01-07T17:51:10.040492Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-07T17:51:10.040854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:10.041001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:10.041063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 
2026-01-07T17:51:10.041165Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:10.049219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767808270033;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-07T17:51:10.049341Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:51:10.049516Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-07T17:51:10.049598Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-07T17:51:10.049932Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:10.063535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-07T17:51:10.088397Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-07T17:51:10.089291Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137002665876576;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=2; 2026-01-07T17:51:10.089357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137002665876576;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-07T17:51:10.101793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=137002665876576;op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137208827047232;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:10.101896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=137002665876576;op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137208827047232;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-07T17:51:10.101977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=137002665876576;op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767808270037;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137208827047232;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=11; 2026-01-07T17:51:10.102365Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:10.102523Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808270037 at tablet 9437184, mediator 0 2026-01-07T17:51:10.102593Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-07T17:51:10.102932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=11;event=move_table_progress;old_path_id=1;new_path_id=2;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-07T17:51:10.115200Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2026-01-07T17:50:13.468067Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678463461639211:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.476196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.696236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.882934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.883041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.904481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:14.048051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.066199Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.177243Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006467s 2026-01-07T17:50:14.482326Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.549430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.549451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.549465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.549534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.092121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:17.272033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509726:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.272152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.272633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509738:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.272699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.534162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.760532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509945:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.760732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509962:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509963:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509964:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509965:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509966:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.761611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509967:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.762021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.781676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509988:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.781849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510018:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.782310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641509986:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.782363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510017:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.782541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.783325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510034:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.783407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510044:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.783481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.785552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510060:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.785822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510058:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.785856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.786253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:17.786271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480641510073:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.786330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.825719Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678480641509989:3059] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: pat ... .973373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710695 2026-01-07T17:51:02.973433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710695 2026-01-07T17:51:02.973453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710695, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-07T17:51:02.975949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:51:02.976295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:51:02.977619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:51:02.978259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710694 2026-01-07T17:51:02.978332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2026-01-07T17:51:02.978357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 7 2026-01-07T17:51:02.981036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710695:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-07T17:51:02.981112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710695:0 progress is 1/1 2026-01-07T17:51:02.981133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710695:0 progress is 1/1 2026-01-07T17:51:02.981171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710695:0 2026-01-07T17:51:02.983204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710695:0 2026-01-07T17:51:02.984953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-07T17:51:02.985026Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-07T17:51:02.985049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-07T17:51:02.985099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2026-01-07T17:51:02.988183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2026-01-07T17:51:03.064677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.064844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, next wakeup# 14.999807s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.064915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, next wakeup# 14.999733s, rate# 0, in queue# 1 shards, running# 2 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.065005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.999644s, rate# 0, in queue# 1 shards, running# 3 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.065176Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2026-01-07T17:51:03.065272Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2026-01-07T17:51:03.065333Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037894 2026-01-07T17:51:03.065668Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037892 2026-01-07T17:51:03.076113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 11, next wakeup# 14.988553s, rate# 0, in queue# 0 shards, running# 3 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.076871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 12, next wakeup# 14.987788s, rate# 0, in queue# 0 shards, running# 2 shards at schemeshard 72057594046644480 
2026-01-07T17:51:03.078269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, shardIdx# 72057594046644480:7 in# 13, next wakeup# 14.986401s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.078401Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037891 CompletedLoansChanged 2026-01-07T17:51:03.078845Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037893 CompletedLoansChanged 2026-01-07T17:51:03.080691Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037894 CompletedLoansChanged 2026-01-07T17:51:03.084097Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:03.091325Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:03.092262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, shardIdx# 72057594046644480:5 in# 27, next wakeup# 14.972403s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.093366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:03.094700Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037892 CompletedLoansChanged 2026-01-07T17:51:03.102623Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:51:03.102832Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:03.102976Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:51:03.103382Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:51:03.103466Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:51:03.107266Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:03.108325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:03.113348Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:51:03.113815Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:51:03.117108Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:51:03.117164Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:51:03.177808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, 
LocalPathId: 38], datashard# 72075186224037893, next wakeup# 14.886857s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.177914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, next wakeup# 14.886737s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.178244Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2026-01-07T17:51:03.178305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 0, next wakeup# 14.886351s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:03.178318Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678463461639563:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037894 2026-01-07T17:51:03.178358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, shardIdx# 72057594046644480:7 in# 0, next wakeup# 14.886291s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 Table has 4 shards |94.7%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> TColumnShardTestSchema::ForgetAfterFail >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> THiveTest::TestSetDomainError [GOOD] >> THiveTest::TestTabletsStartingCounter >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] >> THiveTest::TestTabletsStartingCounter [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2026-01-07T17:50:13.528838Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678463749815700:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.528878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.873093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.917586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.917688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.960323Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:13.995050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.277818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.549541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.549562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.549585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.549658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:14.577958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:15.076485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) Triggering split by load 2026-01-07T17:50:17.139147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686222:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.139222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.141580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686232:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.141669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.534338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.707263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686413:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.707377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.707680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686415:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.707727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.742625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-07T17:50:17.853533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686516:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.853651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.853871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686544:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.853883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686549:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.853933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686545:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686547:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686548:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686550:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686551:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686554:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.854607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.862866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686627:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.862915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686632:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.862958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686636:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.862975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.863058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686638:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.863219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.863446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715666:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.863481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715666:1, at schemeshard: 72057594046644480 2026-01-07T17:50:17.863509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678480929686643:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-01-07T17:50:37.851380Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:50:37.852180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:50:37.887997Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:50:37.888876Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:50:37.889402Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:50:37.889522Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 Table has 2 shards Loading new shards 2026-01-07T17:50:52.770068Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.2669 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-01-07T17:50:57.770252Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.2669 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-01-07T17:51:02.770388Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.2669 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] Table has 2 shards Fast forward > 10h to trigger the merge 2026-01-07T17:51:07.871280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:560: Propose merge request: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976710694 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2026-01-07T17:51:07.871477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-07T17:51:07.872117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d4\374\207\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000d4\374\207\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" 
SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-07T17:51:07.872167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:51:07.874809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-07T17:51:07.879289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-07T17:51:07.879367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 2 -> 3 2026-01-07T17:51:07.881331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2026-01-07T17:51:07.884320Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7592678695678174313:9743] 2026-01-07T17:51:07.897663Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-07T17:51:07.897743Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-07T17:51:07.897852Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-07T17:51:07.901054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710694 TabletId: 72075186224037891 2026-01-07T17:51:07.901092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 3 -> 131 2026-01-07T17:51:07.902437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:51:07.912657Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-07T17:51:07.912743Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:51:07.912781Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:51:07.912801Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-07T17:51:07.913012Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-07T17:51:07.914436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037890 2026-01-07T17:51:07.914690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 
281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037889 2026-01-07T17:51:07.914946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 131 -> 132 2026-01-07T17:51:07.916695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:51:07.916964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:51:07.917021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:51:07.917347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710694 2026-01-07T17:51:07.917387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2026-01-07T17:51:07.917405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-07T17:51:07.921948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-07T17:51:07.921949Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:07.921954Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:07.922224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-07T17:51:07.922286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-07T17:51:07.922311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-07T17:51:07.922344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2026-01-07T17:51:07.923568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2026-01-07T17:51:07.924895Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:07.924989Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:07.925182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:07.925355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:07.928732Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-07T17:51:07.928773Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:51:07.929339Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-07T17:51:07.929421Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:51:07.929428Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:51:07.929445Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:51:07.929532Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:51:07.929602Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 |94.7%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:11.157717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:11.191105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:11.191351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:11.199117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:11.199351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:11.199582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:11.199724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:11.199850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:11.199960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:11.200069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:11.200215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:11.200324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:11.200476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.200592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:11.200732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:11.200841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:11.229982Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:11.230423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:11.230507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:11.230725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.230922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:11.231028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:11.231080Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:11.231204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:11.231280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:11.231331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:11.231365Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:11.231526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.231590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:11.231628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:11.231687Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:11.231797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:11.231860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:11.231904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:11.231936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:11.232005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:11.232056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:11.232091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:11.232152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:11.232215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:11.232248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:11.232472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:11.232526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:11.232610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:11.232740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:11.232784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.232835Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.232896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:11.232961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:11.232995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:11.233038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:11.233079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:11.233110Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:11.233320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:11.233373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-07T17:51:12.337238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136709997881184;op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136916159175232;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-07T17:51:12.337322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136709997881184;op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136916159175232;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:12.337354Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136709997881184;op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767808272184;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136916159175232;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-07T17:51:12.337623Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:12.337705Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808272184 at tablet 9437184, mediator 0 2026-01-07T17:51:12.337734Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-07T17:51:12.338014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:12.338058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-07T17:51:12.338112Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-07T17:51:12.338174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-07T17:51:12.338226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-07T17:51:12.338393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-07T17:51:12.338808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-07T17:51:12.350508Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:12.351813Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136709997884096;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767808272187;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:12.363943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808272187;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136709997884096;op_tx=120:TX_KIND_SCHEMA;min=1767808272187;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:12.364020Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767808272187;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136709997884096;op_tx=120:TX_KIND_SCHEMA;min=1767808272187;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 
StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:12.365242Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136709997885888;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767808272189;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:12.376871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808272189;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136709997885888;op_tx=121:TX_KIND_SCHEMA;min=1767808272189;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:12.376960Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767808272189;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136709997885888;op_tx=121:TX_KIND_SCHEMA;min=1767808272189;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" 
InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-07T17:51:12.377915Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136709997887680;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767808272190;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-07T17:51:12.389617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808272190;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136709997887680;op_tx=122:TX_KIND_SCHEMA;min=1767808272190;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-07T17:51:12.389676Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767808272190;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136709997887680;op_tx=122:TX_KIND_SCHEMA;min=1767808272190;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] Test command err: 2026-01-07T17:50:37.284231Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.310544Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.310814Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.311602Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.311966Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.313077Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 
2026-01-07T17:50:37.313141Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.314102Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:53:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.314144Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.314251Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.314405Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.327382Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.327454Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.329857Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330091Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330239Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330370Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330492Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:65:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330622Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:66:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330759Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:67:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.330791Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.330871Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:53:2077] 2026-01-07T17:50:37.330907Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:53:2077] 2026-01-07T17:50:37.330962Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.331010Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.331928Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.332002Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.335192Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } 
AvailabilityDomains: 0 } 2026-01-07T17:50:37.335344Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.335704Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.335937Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.336756Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:78:2076] ControllerId# 72057594037932033 2026-01-07T17:50:37.336789Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.336847Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.336980Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.346794Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.346849Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.348658Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:84:2080] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.348813Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:85:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.348958Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:86:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.349114Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:87:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.349259Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:88:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.349386Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:89:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.349507Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:90:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.349531Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.349612Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:78:2076] 2026-01-07T17:50:37.349639Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:78:2076] 2026-01-07T17:50:37.349713Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.349757Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.350167Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.350378Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.364977Z node 1 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:53:2077] 2026-01-07T17:50:37.365117Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.366826Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.366893Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:43:2064] 2026-01-07T17:50:37.366922Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:43:2064] 2026-01-07T17:50:37.367521Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.367975Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:78:2076] 2026-01-07T17:50:37.368022Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.368228Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.368288Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.368488Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.368644Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:97:2093] 2026-01-07T17:50:37.368669Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:97:2093] 2026-01-07T17:50:37.368740Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.368764Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.369072Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.369239Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.369320Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.369392Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:97:2093] 2026-01-07T17:50:37.369452Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageC ... 
age_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:13.128245Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:13.128309Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:13.128338Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:13.128384Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.128463Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.128500Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.128768Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [25:325:2304] 2026-01-07T17:51:13.128822Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [25:325:2304] 2026-01-07T17:51:13.128980Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:13.129102Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:13.129203Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:13.129254Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:13.129279Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:13.129324Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.129385Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.129411Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:13.129514Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-07T17:51:13.129612Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [25:325:2304] 2026-01-07T17:51:13.129658Z node 25 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [25:325:2304] 2026-01-07T17:51:13.129919Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:328:2306] 2026-01-07T17:51:13.129969Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:328:2306] 2026-01-07T17:51:13.130053Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:13.130112Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-07T17:51:13.130174Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:328:2306] 2026-01-07T17:51:13.130253Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:328:2306] 2026-01-07T17:51:13.130308Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:328:2306] 2026-01-07T17:51:13.130368Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:328:2306] 2026-01-07T17:51:13.130477Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:328:2306] 2026-01-07T17:51:13.130648Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:328:2306] 2026-01-07T17:51:13.130707Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:328:2306] 2026-01-07T17:51:13.130750Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:328:2306] 2026-01-07T17:51:13.130808Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:328:2306] 2026-01-07T17:51:13.130860Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:328:2306] 2026-01-07T17:51:13.130922Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:327:2305] EventType# 268697624 2026-01-07T17:51:13.131090Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2026-01-07T17:51:13.131158Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:51:13.131382Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2026-01-07T17:51:13.131461Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:51:13.142670Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [aeed6b7f2709b4c0] bootstrap ActorId# [25:331:2309] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:126:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 
2026-01-07T17:51:13.142868Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [aeed6b7f2709b4c0] Id# [72057594037927937:2:5:0:0:126:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:51:13.142961Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:5:0:0:126:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:51:13.143048Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG33 2026-01-07T17:51:13.143114Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG32 2026-01-07T17:51:13.143303Z node 25 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [25:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:126:1] FDS# 126 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:51:13.144502Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:126:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80992 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-07T17:51:13.144652Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:51:13.144749Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:51:13.144972Z node 25 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:51:13.145143Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2026-01-07T17:51:13.145504Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:333:2311] 2026-01-07T17:51:13.145554Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:333:2311] 2026-01-07T17:51:13.145663Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:13.145735Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-07T17:51:13.145818Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:333:2311] 2026-01-07T17:51:13.145884Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:333:2311] 
2026-01-07T17:51:13.145951Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:333:2311] 2026-01-07T17:51:13.146029Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:333:2311] 2026-01-07T17:51:13.146167Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:333:2311] 2026-01-07T17:51:13.146313Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:333:2311] 2026-01-07T17:51:13.146371Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:333:2311] 2026-01-07T17:51:13.146413Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:333:2311] 2026-01-07T17:51:13.146487Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:333:2311] 2026-01-07T17:51:13.146552Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:333:2311] 2026-01-07T17:51:13.146644Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:332:2310] EventType# 268830214 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> MoveTable::WithData+Reboot |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-07T17:51:13.042787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:13.062722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:13.062967Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:13.068233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:13.068406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:13.068572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:13.068652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:13.068713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:13.068771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:13.068835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:13.068904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:13.069020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:13.069118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:13.069215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:13.069278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:13.069342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:13.087494Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:13.088017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:13.088081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:13.088224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:13.088380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:13.088454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:13.088491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:13.088579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:13.088637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:13.088678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:13.088702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:13.088872Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:13.088922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:13.088970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:13.089001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:13.089080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:13.089129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:13.089179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:13.089203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:13.089242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:13.089282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:13.089316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:13.089352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:13.089387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:13.089411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:13.089583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:13.089685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:13.089713Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:13.089819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:13.089862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:13.089887Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:13.089939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:13.089985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:13.090016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:13.090055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:13.090086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:13.090118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:13.090248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:13.090295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... og.cpp:841: tablet_id=9437184;request_tx=104:TX_KIND_SCHEMA;min=1767808274147;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136534523300896;op_tx=104:TX_KIND_SCHEMA;min=1767808274147;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1767808274147;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136740663555264;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-07T17:51:15.298506Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-07T17:51:15.298634Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808274147 at tablet 9437184, mediator 0 2026-01-07T17:51:15.298681Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-07T17:51:15.298878Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-07T17:51:15.310678Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-07T17:51:15.311115Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808274147:max} readable: {1767808274147:max} at tablet 9437184 2026-01-07T17:51:15.311272Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:15.313912Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808274147:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:15.313983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808274147:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:15.314481Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808274147:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:15.315864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808274147:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:15.354605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808274147:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2026-01-07T17:51:15.355439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:15.355623Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:15.355887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.355992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.356206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:15.356317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.356458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.356627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2026-01-07T17:51:15.356918Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":2747101,"name":"_full_task","f":2747101,"d_finished":0,"c":0,"l":2749227,"d":2126},"events":[{"name":"bootstrap","f":2747327,"d_finished":1241,"c":1,"l":2748568,"d":1241},{"a":2748736,"name":"ack","f":2748736,"d_finished":0,"c":0,"l":2749227,"d":491},{"a":2748720,"name":"processing","f":2748720,"d_finished":0,"c":0,"l":2749227,"d":507},{"name":"ProduceResults","f":2748320,"d_finished":503,"c":2,"l":2749044,"d":503},{"a":2749049,"name":"Finish","f":2749049,"d_finished":0,"c":0,"l":2749227,"d":178}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.357015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:15.357258Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":2747101,"name":"_full_task","f":2747101,"d_finished":0,"c":0,"l":2749603,"d":2502},"events":[{"name":"bootstrap","f":2747327,"d_finished":1241,"c":1,"l":2748568,"d":1241},{"a":2748736,"name":"ack","f":2748736,"d_finished":0,"c":0,"l":2749603,"d":867},{"a":2748720,"name":"processing","f":2748720,"d_finished":0,"c":0,"l":2749603,"d":883},{"name":"ProduceResults","f":2748320,"d_finished":503,"c":2,"l":2749044,"d":503},{"a":2749049,"name":"Finish","f":2749049,"d_finished":0,"c":0,"l":2749603,"d":554}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:15.357328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:15.315826Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:15.357389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:15.357529Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> MoveTable::WithUncomittedData >> MoveTable::WithData+Reboot [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestDeleteTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData+Reboot [GOOD] Test command err: 2026-01-07T17:51:14.973568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:14.995934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:14.996117Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:15.001599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:15.001765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:15.001929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:15.002014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:15.002082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:15.002137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:15.002227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:15.002298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:15.002382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:15.002452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:15.002517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:15.002587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:15.002650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:15.023137Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:15.023807Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:15.023875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:15.024071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:15.024251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:15.024323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:15.024355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:15.024449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:15.024492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:15.024523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:15.024545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:15.024697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:15.024742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:15.024768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:15.024800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:15.024876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:15.024947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:15.025003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:15.025030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:15.025065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:15.025089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:15.025111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:15.025139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:15.025163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:15.025180Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:15.025322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:15.025404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:15.025428Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:15.025512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:15.025535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:15.025552Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:15.025612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:15.025654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:15.025682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:51:15.025718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:15.025746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:15.025766Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:15.025852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:15.025890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-07T17:51:16.013720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-07T17:51:16.013930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.014114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.014332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.014554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:16.014731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.014935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.015250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:387:2387] finished for tablet 9437184 2026-01-07T17:51:16.015781Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:380:2381];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1397685,"name":"_full_task","f":1397685,"d_finished":0,"c":0,"l":1412810,"d":15125},"events":[{"name":"bootstrap","f":1398037,"d_finished":2572,"c":1,"l":1400609,"d":2572},{"a":1412024,"name":"ack","f":1410197,"d_finished":1672,"c":1,"l":1411869,"d":2458},{"a":1412007,"name":"processing","f":1400840,"d_finished":5200,"c":3,"l":1411872,"d":6003},{"name":"ProduceResults","f":1400074,"d_finished":2950,"c":6,"l":1412465,"d":2950},{"a":1412470,"name":"Finish","f":1412470,"d_finished":0,"c":0,"l":1412810,"d":340},{"name":"task_result","f":1400856,"d_finished":3462,"c":2,"l":1410047,"d":3462}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.015866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:16.016358Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:380:2381];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1397685,"name":"_full_task","f":1397685,"d_finished":0,"c":0,"l":1413403,"d":15718},"events":[{"name":"bootstrap","f":1398037,"d_finished":2572,"c":1,"l":1400609,"d":2572},{"a":1412024,"name":"ack","f":1410197,"d_finished":1672,"c":1,"l":1411869,"d":3051},{"a":1412007,"name":"processing","f":1400840,"d_finished":5200,"c":3,"l":1411872,"d":6596},{"name":"ProduceResults","f":1400074,"d_finished":2950,"c":6,"l":1412465,"d":2950},{"a":1412470,"name":"Finish","f":1412470,"d_finished":0,"c":0,"l":1413403,"d":933},{"name":"task_result","f":1400856,"d_finished":3462,"c":2,"l":1410047,"d":3462}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:16.016453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:15.928055Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-07T17:51:16.016505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:16.016685Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-07T17:51:16.017494Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-07T17:51:16.017907Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1767808275962:12} readable: {1767808275962:max} at tablet 9437184 2026-01-07T17:51:16.018066Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-07T17:51:16.018160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767808275962:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-07T17:51:16.018288Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767808275962:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> MoveTable::WithUncomittedData [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithUncomittedData [GOOD] Test command err: 2026-01-07T17:51:16.448243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:16.469274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:16.469441Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:16.474647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:16.474811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:16.474961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:16.475051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:16.475117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:16.475173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:16.475252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:16.475330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:16.475397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:16.475477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.475542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:16.475620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:16.475699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:16.495519Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:16.496067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:16.496123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:16.496262Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:16.496405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:16.496471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:16.496499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:16.496569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:16.496611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:16.496645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:16.496672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:16.496790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:16.496830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:16.496864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:16.496898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:16.496978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:16.497022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:16.497058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:16.497076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:16.497111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:16.497136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:16.497155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:16.497191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:16.497217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:16.497234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:16.497374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:16.497503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:16.497529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:16.497619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:16.497648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.497679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.497732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:16.497761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:16.497786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:16.497829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:16.497860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:16.497889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:16.497978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:16.498009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
"id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"1,2,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":20,"id":21},"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":25,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":25,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":25,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10","t":"Projection"},"w":250,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":25,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":25,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":25,"id":16},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":25,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":25,"id":10},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":25,"id":6},"22":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":25,"id":12}}}; 2026-01-07T17:51:17.208120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1767808277422:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:17.250633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1767808277422:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:330:2342];trace_detailed=; 2026-01-07T17:51:17.251522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);; 2026-01-07T17:51:17.251699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-07T17:51:17.252048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:17.252175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:17.252417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:17.252552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:17.252692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-07T17:51:17.252847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:330:2342] finished for tablet 9437184 2026-01-07T17:51:17.253200Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:324:2336];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1246274,"name":"_full_task","f":1246274,"d_finished":0,"c":0,"l":1248589,"d":2315},"events":[{"name":"bootstrap","f":1246468,"d_finished":1438,"c":1,"l":1247906,"d":1438},{"a":1248090,"name":"ack","f":1248090,"d_finished":0,"c":0,"l":1248589,"d":499},{"a":1248078,"name":"processing","f":1248078,"d_finished":0,"c":0,"l":1248589,"d":511},{"name":"ProduceResults","f":1247623,"d_finished":549,"c":2,"l":1248412,"d":549},{"a":1248415,"name":"Finish","f":1248415,"d_finished":0,"c":0,"l":1248589,"d":174}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:17.253253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:324:2336];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:17.253524Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:324:2336];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1246274,"name":"_full_task","f":1246274,"d_finished":0,"c":0,"l":1248983,"d":2709},"events":[{"name":"bootstrap","f":1246468,"d_finished":1438,"c":1,"l":1247906,"d":1438},{"a":1248090,"name":"ack","f":1248090,"d_finished":0,"c":0,"l":1248983,"d":893},{"a":1248078,"name":"processing","f":1248078,"d_finished":0,"c":0,"l":1248983,"d":905},{"name":"ProduceResults","f":1247623,"d_finished":549,"c":2,"l":1248412,"d":549},{"a":1248415,"name":"Finish","f":1248415,"d_finished":0,"c":0,"l":1248983,"d":568}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:17.253586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:17.208092Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:17.253614Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:17.253715Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> TColumnShardTestSchema::ColdCompactionSmoke >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> 
THiveTest::TestFollowersCrossDC_MovingLeader >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestDownCounterDeleteNode >> THiveTest::TestBridgeDisconnectWithReboots [GOOD] >> THiveTest::TestBridgeDemotion >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> THiveTest::TestDownCounterDeleteNode [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestDownCounterDeleteNode [GOOD] Test command err: 2026-01-07T17:50:37.096157Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.117124Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.117339Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.117972Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.118260Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.119179Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:37.119240Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.120097Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:74:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.120134Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.120260Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.120393Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.135588Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.135644Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.137910Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138093Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138214Z 
node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138341Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138466Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138624Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138751Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:88:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.138777Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.138850Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:74:2077] 2026-01-07T17:50:37.138881Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:74:2077] 2026-01-07T17:50:37.138940Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.138993Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.139898Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.139981Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.142809Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.142954Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.143289Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.143494Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.144338Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:99:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.144368Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.144424Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.144513Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.152272Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 
2026-01-07T17:50:37.152316Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.153524Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:106:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.153627Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:107:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.153699Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:108:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.153768Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:109:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.153856Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:110:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.153938Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:111:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.154086Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:112:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.154113Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.154163Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:99:2077] 2026-01-07T17:50:37.154184Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:99:2077] 2026-01-07T17:50:37.154228Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.154255Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.154956Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.171828Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:74:2077] 2026-01-07T17:50:37.172027Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.173666Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.174004Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:99:2077] 2026-01-07T17:50:37.174057Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.174338Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.174406Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.177490Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" 
PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.177628Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.177978Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.178212Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.179360Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:127:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.179392Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.179454Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.179558Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.181437Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.181469Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.181748Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37. ... 
ID: 72057594037936131 Cookie: 2} 2026-01-07T17:51:21.841435Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.841533Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.841577Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.841634Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-07T17:51:21.841713Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-07T17:51:21.841747Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-07T17:51:21.883947Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [31:81:2064] 2026-01-07T17:51:21.883989Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [31:81:2064] 2026-01-07T17:51:21.884051Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:21.884082Z node 31 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 31 selfDC 2 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:21.884168Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:21.884483Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-07T17:51:21.884542Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-07T17:51:21.884582Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-07T17:51:21.884745Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.884818Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.884847Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.884909Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-07T17:51:21.884958Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, 
check reconnect [31:81:2064] 2026-01-07T17:51:21.884980Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [31:81:2064] 2026-01-07T17:51:21.895351Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [30:56:2064] 2026-01-07T17:51:21.895414Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [30:56:2064] 2026-01-07T17:51:21.895539Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:21.895587Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:21.895738Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:21.895848Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-07T17:51:21.895915Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-07T17:51:21.895953Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-07T17:51:21.896017Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.896112Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.896158Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.896219Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-07T17:51:21.896273Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [30:56:2064] 2026-01-07T17:51:21.896309Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [30:56:2064] 2026-01-07T17:51:21.970188Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [30:199:2162] 2026-01-07T17:51:21.970248Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [30:199:2162] 2026-01-07T17:51:21.970327Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:21.970373Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:21.970587Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit 
ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:21.970700Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-07T17:51:21.970770Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-07T17:51:21.970825Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-07T17:51:21.970873Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.970927Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.970954Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:51:21.971025Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-07T17:51:21.971097Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-07T17:51:21.971140Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-07T17:51:22.035757Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} queued, type NKikimr::NHive::TTxDeleteNode 2026-01-07T17:51:22.035864Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:51:22.036127Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} hope 1 -> done Change{10, redo 64b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:51:22.036216Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:51:22.036980Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [30:538:2318] 2026-01-07T17:51:22.037031Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [30:538:2318] 2026-01-07T17:51:22.037115Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [30:452:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:22.037175Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 30 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:452:2266] 2026-01-07T17:51:22.037235Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [30:538:2318] 2026-01-07T17:51:22.037286Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [30:538:2318] 2026-01-07T17:51:22.037339Z node 30 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [30:538:2318] 2026-01-07T17:51:22.037412Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [30:538:2318] 2026-01-07T17:51:22.037539Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [30:538:2318] 2026-01-07T17:51:22.037653Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [30:538:2318] 2026-01-07T17:51:22.037696Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [30:538:2318] 2026-01-07T17:51:22.037736Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [30:538:2318] 2026-01-07T17:51:22.037789Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [30:538:2318] 2026-01-07T17:51:22.037835Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [30:538:2318] 2026-01-07T17:51:22.037905Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [30:537:2317] EventType# 268830214 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2026-01-07T17:51:22.842828Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:23.007013Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:51:23.008823Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:51:23.009055Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:51:23.009143Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:51:23.029742Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:51:23.030502Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 2 2026-01-07T17:51:23.030611Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-07T17:51:23.030657Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2026-01-07T17:51:23.030738Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2026-01-07T17:51:23.030805Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2026-01-07T17:51:23.030853Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-07T17:51:23.030980Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> THiveTest::TestBridgeDemotion [GOOD] >> THiveTest::TestBridgeBalance >> TPQCachingProxyTest::TestPublishAndForget >> TPQCachingProxyTest::MultipleSessions >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> TPQCachingProxyTest::MultipleSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2026-01-07T17:51:25.129595Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:25.177774Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:51:25.177835Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:51:25.177925Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:51:25.177976Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:51:25.190569Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:51:25.190664Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-07T17:51:25.190750Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-07T17:51:25.190782Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-07T17:51:25.190873Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2026-01-07T17:51:25.277619Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:25.326535Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:51:25.326608Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:51:25.326714Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:51:25.326784Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:51:25.341485Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:51:25.341564Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: 
session1:1 with generation 1 2026-01-07T17:51:25.341620Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-07T17:51:25.341666Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2026-01-07T17:51:25.341700Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-07T17:51:25.341748Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2026-01-07T17:51:25.341793Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2026-01-07T17:51:25.341850Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2026-01-07T17:51:25.341886Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession >> TPQCachingProxyTest::OutdatedSession [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2026-01-07T17:51:27.509659Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:27.556100Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:51:27.556157Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:51:27.556244Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:51:27.556296Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:51:27.567309Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:51:27.567384Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-07T17:51:27.567443Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-07T17:51:27.567472Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-07T17:51:27.567531Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:293: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> 
THiveTest::TestCreateSubHiveCreateManyTablets >> THiveTest::TestBridgeBalance [GOOD] >> THiveTest::TestBridgeFollowers >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2026-01-07T17:50:13.478983Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678461859169107:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.482520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.831489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.894237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.894347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.906221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:14.067621Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.085030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.495428Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.546343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.546374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.546403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.546480Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.092658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:17.137239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479039039635:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.137339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.137642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479039039645:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.137677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.534062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.725713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479039039822:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.725801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.725983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479039039827:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.726039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678479039039828:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.726086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.734500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:50:17.752110Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678479039039831:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:50:17.841585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678479039039907:3097] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:50:18.476871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678461859169107:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:18.476959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:50:28.801969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:28.801993Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:56.870228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976717661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-07T17:50:57.617709Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.8802 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:02.618381Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 4.8752 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:07.618863Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 4.8752 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:12.619225Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 4.8752 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:13.906499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2026-01-07T17:51:13.906603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2026-01-07T17:51:13.906643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2026-01-07T17:51:17.619655Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 12.3011 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:17.720622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 138782, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 123011, cpuUsageThreshold: 50000) 2026-01-07T17:51:17.720651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-07T17:51:17.721059Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2279: Started key access sampling at datashard: 72075186224037888 2026-01-07T17:51:17.721214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 138782 row count 2000 buckets 9 ready 1 2026-01-07T17:51:17.721273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-01-07T17:51:22.619912Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 12.3011 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-07T17:51:22.720746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 138782, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 123011, cpuUsageThreshold: 50000) 2026-01-07T17:51:22.720773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-07T17:51:22.724127Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2281: Extended key access sampling at datashard: 72075186224037888 2026-01-07T17:51:22.724270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 138782 row count 2000 buckets 9 ready 1 2026-01-07T17:51:22.724307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram ... 
1474976710693 TabletId: 72057594046644480 2026-01-07T17:51:22.725129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000Z3\364x\000\000\000\200" } SchemeshardId: 72057594046644480 2026-01-07T17:51:22.725626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000Z3\364x\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000Z3\364x\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000Z3\364x\000\000\000\200" } SchemeshardId: 72057594046644480 2026-01-07T17:51:22.725654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:51:22.727967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-07T17:51:22.731813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-07T17:51:22.731960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-07T17:51:22.732007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 2 -> 3 2026-01-07T17:51:22.733266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-07T17:51:22.739091Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7592678758212094244:5965] 2026-01-07T17:51:22.742047Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7592678758212094246:5586] 2026-01-07T17:51:22.757017Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T17:51:22.757093Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T17:51:22.757236Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:51:22.757341Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T17:51:22.757408Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-07T17:51:22.757514Z node 1 
:TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-07T17:51:22.760807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037890 2026-01-07T17:51:22.761092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037889 2026-01-07T17:51:22.761121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 3 -> 131 2026-01-07T17:51:22.762280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:51:22.774733Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-07T17:51:22.774844Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:51:22.774885Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-07T17:51:22.774910Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T17:51:22.774911Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-07T17:51:22.774971Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:51:22.774996Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:51:22.775012Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-07T17:51:22.775206Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T17:51:22.775246Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-07T17:51:22.776212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710693 TabletId: 72075186224037888 2026-01-07T17:51:22.776448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 131 -> 132 2026-01-07T17:51:22.777568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T17:51:22.777749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T17:51:22.777795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 
281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T17:51:22.779677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-01-07T17:51:22.779720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-01-07T17:51:22.779739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-07T17:51:22.782724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-07T17:51:22.782802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:51:22.782816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-07T17:51:22.782849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-01-07T17:51:22.783769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-01-07T17:51:22.876852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:51:22.877016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.999805s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:22.877381Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678461859169438:2191] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-07T17:51:22.877711Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7592678461859169438:2191] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-07T17:51:22.887390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 10, next wakeup# 14.989440s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-07T17:51:22.889321Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-07T17:51:22.890628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 13, 
next wakeup# 14.986198s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-07T17:51:22.891923Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-07T17:51:22.896115Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:22.898576Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:22.901809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:22.904877Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-07T17:51:22.905050Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:51:22.905959Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T17:51:22.906056Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 Table has 2 shards |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:01.246469Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:01.252420Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:01.252992Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.282504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.282808Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.289796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.290048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.290327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.290476Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.290574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.290662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.290729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.290823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.290921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.291022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.291109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.291244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.291372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.313962Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:01.320404Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.321374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.321441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.321658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.321850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.321940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.321990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.322128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.322211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.322261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.322294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.322524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.322616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.322659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.322689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.322792Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.322852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.322895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.322925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.322972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.323014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:51:01.323046Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.323103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.323171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.323268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.323529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.323584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.323618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.323750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.323798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.323831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.323879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.323916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.323946Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.324002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.324040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
esult=0;count=0;finished=1; 2026-01-07T17:51:30.998531Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:30.998572Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:30.998714Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:30.998864Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:30.998900Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:30.999041Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:30.999090Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-07T17:51:30.999302Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-07T17:51:30.999447Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:30.999546Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:30.999672Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:30.999785Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:30.999853Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:30.999915Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.000130Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-07T17:51:31.000517Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":30226586,"name":"_full_task","f":30226586,"d_finished":0,"c":0,"l":30235728,"d":9142},"events":[{"name":"bootstrap","f":30226777,"d_finished":1035,"c":1,"l":30227812,"d":1035},{"a":30235323,"name":"ack","f":30232903,"d_finished":2241,"c":2,"l":30235245,"d":2646},{"a":30235316,"name":"processing","f":30227936,"d_finished":4527,"c":5,"l":30235248,"d":4939},{"name":"ProduceResults","f":30227432,"d_finished":2987,"c":9,"l":30235475,"d":2987},{"a":30235479,"name":"Finish","f":30235479,"d_finished":0,"c":0,"l":30235728,"d":249},{"name":"task_result","f":30227951,"d_finished":2208,"c":3,"l":30232758,"d":2208}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.000605Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:31.001007Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":30226586,"name":"_full_task","f":30226586,"d_finished":0,"c":0,"l":30236192,"d":9606},"events":[{"name":"bootstrap","f":30226777,"d_finished":1035,"c":1,"l":30227812,"d":1035},{"a":30235323,"name":"ack","f":30232903,"d_finished":2241,"c":2,"l":30235245,"d":3110},{"a":30235316,"name":"processing","f":30227936,"d_finished":4527,"c":5,"l":30235248,"d":5403},{"name":"ProduceResults","f":30227432,"d_finished":2987,"c":9,"l":30235475,"d":2987},{"a":30235479,"name":"Finish","f":30235479,"d_finished":0,"c":0,"l":30236192,"d":713},{"name":"task_result","f":30227951,"d_finished":2208,"c":3,"l":30232758,"d":2208}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.001089Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:30.989384Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-07T17:51:31.001142Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:31.001281Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] Test command err: 2026-01-07T17:50:48.201501Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:48.235047Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 
1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:48.235325Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:48.236165Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:48.236558Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:48.237755Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:48.237831Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:48.238853Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-07T17:50:48.238897Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:48.239025Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:48.239194Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:48.241669Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-07T17:50:48.241716Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-07T17:50:48.241780Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:48.241837Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:48.242118Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:48.242279Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:48.254817Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:48.254886Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:48.257457Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.257657Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.257797Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.257963Z node 1 :BS_PROXY 
DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.258120Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.258258Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.258387Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:48.258420Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:48.258593Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:48.258673Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:48.274893Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:48.274962Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:48.275131Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:48.277403Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:48.277483Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:50:48.277541Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:50:48.277576Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:50:48.277624Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:48.277870Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-07T17:50:48.277925Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-07T17:50:48.278093Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:48.278178Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-07T17:50:48.278213Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-07T17:50:48.278284Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-07T17:50:48.289219Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-07T17:50:48.289570Z node 1 
:STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:48.289723Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:48.289907Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2026-01-07T17:50:48.289939Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:51:2091] 2026-01-07T17:50:48.290065Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:48.290144Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:51:2091] 2026-01-07T17:50:48.290551Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-07T17:50:48.290603Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:48.290711Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:48.290872Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-07T17:50:48.291047Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-07T17:50:48.291095Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-07T17:50:48.291975Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:48.292236Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:48.292437Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T17:50:48.292849Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:48.292910Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-07T17:50:48.293002Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 
Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:48.303197Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-07T17:50:48.303284Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-07T17:50:48.303338Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-07T17:50:48.303442Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-07T17:50:48.307749Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-07T17:50:48.307824Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 202 ... tatus: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.377784Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.377827Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:641:2158] followers: 2 2026-01-07T17:51:30.377850Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.377877Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:892:2133] 2026-01-07T17:51:30.377896Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:892:2133] 2026-01-07T17:51:30.378093Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:894:2134] 2026-01-07T17:51:30.378113Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:894:2134] 2026-01-07T17:51:30.378149Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:641:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-07T17:51:30.378170Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.378233Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:30.378422Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:30.378452Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:30.378478Z node 46 :STATESTORAGE DEBUG: 
statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:30.378618Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.378667Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.378711Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:641:2158] followers: 2 2026-01-07T17:51:30.378735Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.378764Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:894:2134] 2026-01-07T17:51:30.378780Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:894:2134] 2026-01-07T17:51:30.378905Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:896:2135] 2026-01-07T17:51:30.378922Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:896:2135] 2026-01-07T17:51:30.378953Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:641:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-07T17:51:30.378976Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.379090Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:30.379259Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:30.379291Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:30.379319Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:30.379471Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.379528Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.379573Z node 49 :TABLET_RESOLVER DEBUG: 
tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:641:2158] followers: 2 2026-01-07T17:51:30.379596Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.379637Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:896:2135] 2026-01-07T17:51:30.379655Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:896:2135] 2026-01-07T17:51:30.379777Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:898:2136] 2026-01-07T17:51:30.379805Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:898:2136] 2026-01-07T17:51:30.379855Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:641:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-07T17:51:30.379876Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.379933Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:30.380113Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:30.380142Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:30.380166Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:30.380323Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.380379Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.380427Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:641:2158] followers: 2 2026-01-07T17:51:30.380449Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.380478Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:898:2136] 2026-01-07T17:51:30.380495Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:898:2136] 2026-01-07T17:51:30.380639Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:900:2137] 2026-01-07T17:51:30.380657Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup 
[49:900:2137] 2026-01-07T17:51:30.380695Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:641:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-07T17:51:30.380716Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.380777Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:30.380994Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:51:30.381026Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:51:30.381054Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:51:30.381204Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.381260Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:641:2158] CurrentLeaderTablet: [48:671:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:51:30.381317Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:641:2158] followers: 2 2026-01-07T17:51:30.381344Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-07T17:51:30.381395Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:900:2137] 2026-01-07T17:51:30.381413Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:900:2137] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:00.983981Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:01.012465Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:01.020386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.054849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.055792Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.077078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.077396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.077694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.077852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.077972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.078085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.078245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.078384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.078527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.078644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.078793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.078911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.079052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.082969Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender 
[1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:01.105695Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.105961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.106048Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.106303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.108617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.108766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.108859Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.109019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.109148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.109231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.109276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.109484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.109571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.109614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.109647Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.109786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.109847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.109932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.109972Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.110035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.110086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.110119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.110193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.110240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.110283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.110535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.110599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.110640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.110793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.110861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.110896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.110955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.111001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.111034Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.111081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.111128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... d=1; 2026-01-07T17:51:31.801546Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:31.801592Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:31.801746Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:31.801894Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.801932Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:31.802080Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:31.802129Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 
2026-01-07T17:51:31.802371Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:719:2700];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-07T17:51:31.802524Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.802627Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.802760Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.802880Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:31.802944Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.803005Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.803226Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:720:2701] finished for tablet 9437184 2026-01-07T17:51:31.803643Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:719:2700];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":31823729,"name":"_full_task","f":31823729,"d_finished":0,"c":0,"l":31833553,"d":9824},"events":[{"name":"bootstrap","f":31823964,"d_finished":1147,"c":1,"l":31825111,"d":1147},{"a":31833144,"name":"ack","f":31830759,"d_finished":2192,"c":2,"l":31833063,"d":2601},{"a":31833135,"name":"processing","f":31825246,"d_finished":4711,"c":5,"l":31833066,"d":5129},{"name":"ProduceResults","f":31824673,"d_finished":2978,"c":9,"l":31833292,"d":2978},{"a":31833294,"name":"Finish","f":31833294,"d_finished":0,"c":0,"l":31833553,"d":259},{"name":"task_result","f":31825263,"d_finished":2441,"c":3,"l":31830597,"d":2441}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.803725Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:719:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:31.804131Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:719:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":31823729,"name":"_full_task","f":31823729,"d_finished":0,"c":0,"l":31834043,"d":10314},"events":[{"name":"bootstrap","f":31823964,"d_finished":1147,"c":1,"l":31825111,"d":1147},{"a":31833144,"name":"ack","f":31830759,"d_finished":2192,"c":2,"l":31833063,"d":3091},{"a":31833135,"name":"processing","f":31825246,"d_finished":4711,"c":5,"l":31833066,"d":5619},{"name":"ProduceResults","f":31824673,"d_finished":2978,"c":9,"l":31833292,"d":2978},{"a":31833294,"name":"Finish","f":31833294,"d_finished":0,"c":0,"l":31834043,"d":749},{"name":"task_result","f":31825263,"d_finished":2441,"c":3,"l":31830597,"d":2441}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:31.804187Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:31.791687Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-07T17:51:31.804243Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:31.804390Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TPQCachingProxyTest::TestDeregister >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> TPQCachingProxyTest::TestDeregister [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2026-01-07T17:51:33.107714Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:33.154538Z node 1 :PQ_TX INFO: 
pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:51:33.154605Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:51:33.154700Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:51:33.154754Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:51:33.166586Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:51:33.166670Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-07T17:51:33.166707Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2026-01-07T17:51:33.166798Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.8%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 |94.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |94.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::HeadObject |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> TS3WrapperTests::UploadUnknownPart >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::HeadObject [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] >> TS3WrapperTests::GetUnknownObject ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2026-01-07T17:51:34.833507Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 2E986F05-5638-42D1-92AF-A166800B5555, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:25694 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 208F3F74-2A83-4F52-8DD2-A8306F4852FE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-07T17:51:34.865503Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 
2E986F05-5638-42D1-92AF-A166800B5555, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-07T17:51:34.865791Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1D7FD424-603E-4D21-AA02-E59FC2CC4B3E, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:25694 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F032F409-EC9D-494B-8702-0F1A5D4FD507 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-07T17:51:34.871504Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1D7FD424-603E-4D21-AA02-E59FC2CC4B3E, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:32.253158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:32.253292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.253357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:32.253412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:32.253458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:48:32.253488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:32.253554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:32.254859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:32.255807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:32.257108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:32.358021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:32.358084Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:32.376639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:32.377021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:32.380747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:32.390538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:32.394035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:32.396060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:32.400822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:32.411019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.412250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:32.421873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:32.421974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:32.422148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:32.422222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:32.422355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:32.422628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:32.600589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.602736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.602904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.602998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:32.603687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 103 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 132 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-07T17:51:33.869355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:51:33.869434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0103 2026-01-07T17:51:33.869577Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:51:33.869619Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:51:33.911159Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.911236Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.911266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-07T17:51:33.911338Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-07T17:51:33.911368Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-07T17:51:33.911475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-07T17:51:33.911536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-07T17:51:33.911565Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-07T17:51:33.911660Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-07T17:51:33.911761Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-07T17:51:33.922234Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.922309Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.922332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:51:33.953458Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1122:3085]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T17:51:33.953727Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 39 2026-01-07T17:51:33.954049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1122:3085], Recipient [3:129:2153]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 39 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 34 Memory: 124368 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 302 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-07T17:51:33.954087Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-07T17:51:33.954141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0034 2026-01-07T17:51:33.954249Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-07T17:51:33.954300Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-07T17:51:33.995687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2026-01-07T17:51:33.995777Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-07T17:51:33.995815Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-07T17:51:33.995887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2026-01-07T17:51:33.995913Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-07T17:51:33.996019Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.996051Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:33.996081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-07T17:51:33.996136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-07T17:51:33.996159Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-07T17:51:33.996239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 13940 row count 100 2026-01-07T17:51:33.996289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2026-01-07T17:51:33.996324Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-07T17:51:33.996398Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:582: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2026-01-07T17:51:33.996421Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:669: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2026-01-07T17:51:33.996449Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-07T17:51:33.996516Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2026-01-07T17:51:34.007030Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:34.007089Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-07T17:51:34.007112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-07T17:51:34.214672Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:34.214744Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T17:51:34.214835Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:129:2153], Recipient [3:129:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T17:51:34.214858Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2026-01-07T17:51:34.849927Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 57FA3A4A-64E2-4A0B-9289-F1808C60D6B3, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:10214 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A56ABC5F-0098-4B22-89D8-D1E5D4D972DC amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2026-01-07T17:51:34.863386Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 57FA3A4A-64E2-4A0B-9289-F1808C60D6B3, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2026-01-07T17:51:34.840077Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# AA875C35-ACF8-439F-8F5F-40DC8DFC4C70, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:30195 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3BABBE59-556E-4F7A-979B-CC9B1DCB17A4 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2026-01-07T17:51:34.863454Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# AA875C35-ACF8-439F-8F5F-40DC8DFC4C70, response# |94.8%| 
[TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |94.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2026-01-07T17:51:35.348555Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# F9E693F2-44F3-4069-9124-E718BCF2D668, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:61744 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 808A9265-A800-4389-BB80-B9AF819E6A52 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-07T17:51:35.353458Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# F9E693F2-44F3-4069-9124-E718BCF2D668, response# No response body. |94.8%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject |94.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] >> TS3WrapperTests::HeadUnknownObject [GOOD] |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] |94.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:08.354119Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:08.358747Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:08.359154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:08.389320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:08.389555Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:08.396867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:08.397095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:08.397364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:08.397476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:08.397567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:08.397665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:08.397764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:08.397882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:08.397989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:08.398079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.398205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:08.398312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:08.398425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:08.423381Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:08.427064Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:08.427614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:08.427669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:08.427879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.428028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:08.428125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:08.428170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:08.428272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:08.428329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:08.428368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:08.428397Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:08.428572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.428636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:08.428674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:08.428707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:08.428793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:08.428840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:08.428884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:08.428910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:08.428981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:08.429028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:08.429085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:08.429133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:08.429169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:08.429246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:08.429536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:08.429588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:08.429618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:08.429729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:08.429781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.429811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.429858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:08.429894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:08.429920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:08.429957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:08.429988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
s;result=1;count=1000;finished=1; 2026-01-07T17:51:36.250955Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:36.250980Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:36.251154Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.251344Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.251395Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:36.251578Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:36.251663Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-07T17:51:36.251934Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-07T17:51:36.252115Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.252222Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.252342Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.252493Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.252654Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.252831Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.253185Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:485:2489] finished for tablet 9437184 2026-01-07T17:51:36.253685Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:484:2488];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.006}],"full":{"a":28290033,"name":"_full_task","f":28290033,"d_finished":0,"c":0,"l":28296662,"d":6629},"events":[{"name":"bootstrap","f":28290233,"d_finished":1001,"c":1,"l":28291234,"d":1001},{"a":28295879,"name":"ack","f":28294535,"d_finished":1244,"c":1,"l":28295779,"d":2027},{"a":28295864,"name":"processing","f":28291353,"d_finished":2725,"c":3,"l":28295782,"d":3523},{"name":"ProduceResults","f":28290832,"d_finished":2171,"c":6,"l":28296283,"d":2171},{"a":28296290,"name":"Finish","f":28296290,"d_finished":0,"c":0,"l":28296662,"d":372},{"name":"task_result","f":28291368,"d_finished":1435,"c":2,"l":28294399,"d":1435}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.253753Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:36.254230Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:484:2488];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.004},{"events":["l_ProduceResults","f_Finish"],"t":0.006},{"events":["l_ack","l_processing","l_Finish"],"t":0.007}],"full":{"a":28290033,"name":"_full_task","f":28290033,"d_finished":0,"c":0,"l":28297191,"d":7158},"events":[{"name":"bootstrap","f":28290233,"d_finished":1001,"c":1,"l":28291234,"d":1001},{"a":28295879,"name":"ack","f":28294535,"d_finished":1244,"c":1,"l":28295779,"d":2556},{"a":28295864,"name":"processing","f":28291353,"d_finished":2725,"c":3,"l":28295782,"d":4052},{"name":"ProduceResults","f":28290832,"d_finished":2171,"c":6,"l":28296283,"d":2171},{"a":28296290,"name":"Finish","f":28296290,"d_finished":0,"c":0,"l":28297191,"d":901},{"name":"task_result","f":28291368,"d_finished":1435,"c":2,"l":28294399,"d":1435}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.254341Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:36.245476Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-07T17:51:36.254413Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:36.254573Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2026-01-07T17:51:36.301216Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 7D36C15F-5613-4E8F-B72A-1421BDFA6DB0, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:17758 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E1427464-06FC-4FE9-9839-D73D07E06B85 amz-sdk-request: attempt=1 content-type: 
application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-07T17:51:36.305643Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 7D36C15F-5613-4E8F-B72A-1421BDFA6DB0, response# No response body. >> TS3WrapperTests::MultipartUpload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:08.746501Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:08.750672Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:08.751117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:08.780964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:08.781201Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:08.788287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:08.788551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:08.788795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:08.788912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:08.789032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:08.789128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:08.789224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:08.789375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:08.789488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:08.789586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.789705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:08.789811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:08.789945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:08.814942Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:08.818530Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:08.819153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:08.819230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:08.819433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.819610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:08.819698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:08.819739Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:08.819837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:08.819898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:08.819937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:08.819965Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:08.820156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.820232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:08.820274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:08.820302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:08.820384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:08.820436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:08.820473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:08.820503Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:08.820559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:08.820617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:08.820655Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:08.820701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:08.820740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:08.820826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:08.821082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:08.821130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:08.821158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:08.821294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:08.821345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.821381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.821427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:08.821485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:08.821516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:08.821557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:08.821591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
inished=1; 2026-01-07T17:51:36.257145Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:36.257188Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:36.257451Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.257650Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.257716Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:36.257910Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:36.257978Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-07T17:51:36.258272Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:588:2568];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-07T17:51:36.258511Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.258635Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.258802Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.258965Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.259130Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.259306Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.259605Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:589:2569] finished for tablet 9437184 2026-01-07T17:51:36.260071Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:588:2568];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":27961479,"name":"_full_task","f":27961479,"d_finished":0,"c":0,"l":27969576,"d":8097},"events":[{"name":"bootstrap","f":27961757,"d_finished":1293,"c":1,"l":27963050,"d":1293},{"a":27968843,"name":"ack","f":27967328,"d_finished":1405,"c":1,"l":27968733,"d":2138},{"a":27968825,"name":"processing","f":27963176,"d_finished":3417,"c":3,"l":27968735,"d":4168},{"name":"ProduceResults","f":27962554,"d_finished":2501,"c":6,"l":27969246,"d":2501},{"a":27969251,"name":"Finish","f":27969251,"d_finished":0,"c":0,"l":27969576,"d":325},{"name":"task_result","f":27963193,"d_finished":1954,"c":2,"l":27967115,"d":1954}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.260151Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:588:2568];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:36.260654Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:588:2568];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":27961479,"name":"_full_task","f":27961479,"d_finished":0,"c":0,"l":27970096,"d":8617},"events":[{"name":"bootstrap","f":27961757,"d_finished":1293,"c":1,"l":27963050,"d":1293},{"a":27968843,"name":"ack","f":27967328,"d_finished":1405,"c":1,"l":27968733,"d":2658},{"a":27968825,"name":"processing","f":27963176,"d_finished":3417,"c":3,"l":27968735,"d":4688},{"name":"ProduceResults","f":27962554,"d_finished":2501,"c":6,"l":27969246,"d":2501},{"a":27969251,"name":"Finish","f":27969251,"d_finished":0,"c":0,"l":27970096,"d":845},{"name":"task_result","f":27963193,"d_finished":1954,"c":2,"l":27967115,"d":1954}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.260739Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:36.249925Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-07T17:51:36.260784Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:36.260966Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::MultipartUpload [GOOD] |94.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::PutObject ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 
2026-01-07T17:51:08.643766Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:08.648319Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:08.648803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:08.681800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:08.682078Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:08.690034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:08.690295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:08.690571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:08.690704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:08.690809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:08.690900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:08.690993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:08.691110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:08.691212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:08.691326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.691434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:08.691549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:08.691661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:08.716806Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:08.720632Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:08.721217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:08.721277Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:08.721496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.721649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:08.721733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:08.721804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:08.721931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:08.722007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:08.722056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:08.722095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:08.722277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:08.722347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:08.722377Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:08.722398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:08.722467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:08.722511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:08.722546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:08.722567Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:08.722617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:08.722646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:08.722667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:08.722725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:08.722758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:08.722822Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:08.723020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:08.723076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:08.723116Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:08.723267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:08.723324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.723359Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:08.723406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:08.723452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:08.723489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:08.723531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:08.723567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:36.452035Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:36.452074Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:36.452216Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.452387Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.452428Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:36.452574Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:36.452636Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-07T17:51:36.452840Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-07T17:51:36.453013Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.453135Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.453278Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.453403Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.453477Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.453536Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.453768Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-07T17:51:36.454157Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":28296206,"name":"_full_task","f":28296206,"d_finished":0,"c":0,"l":28305962,"d":9756},"events":[{"name":"bootstrap","f":28296391,"d_finished":1126,"c":1,"l":28297517,"d":1126},{"a":28305531,"name":"ack","f":28302978,"d_finished":2361,"c":2,"l":28305441,"d":2792},{"a":28305522,"name":"processing","f":28297647,"d_finished":4770,"c":5,"l":28305444,"d":5210},{"name":"ProduceResults","f":28297053,"d_finished":3180,"c":9,"l":28305687,"d":3180},{"a":28305689,"name":"Finish","f":28305689,"d_finished":0,"c":0,"l":28305962,"d":273},{"name":"task_result","f":28297665,"d_finished":2319,"c":3,"l":28302811,"d":2319}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.454234Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:36.454627Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":28296206,"name":"_full_task","f":28296206,"d_finished":0,"c":0,"l":28306410,"d":10204},"events":[{"name":"bootstrap","f":28296391,"d_finished":1126,"c":1,"l":28297517,"d":1126},{"a":28305531,"name":"ack","f":28302978,"d_finished":2361,"c":2,"l":28305441,"d":3240},{"a":28305522,"name":"processing","f":28297647,"d_finished":4770,"c":5,"l":28305444,"d":5658},{"name":"ProduceResults","f":28297053,"d_finished":3180,"c":9,"l":28305687,"d":3180},{"a":28305689,"name":"Finish","f":28305689,"d_finished":0,"c":0,"l":28306410,"d":721},{"name":"task_result","f":28297665,"d_finished":2319,"c":3,"l":28302811,"d":2319}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.454691Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:36.442211Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-07T17:51:36.454730Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:36.454875Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:05.929512Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:05.933551Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: 
NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:05.933922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:05.960963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:05.961245Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:05.968254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:05.968457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:05.968663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:05.968790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:05.968855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:05.968945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:05.969042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:05.969189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:05.969269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:05.969342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.969422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:05.969516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:05.969592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:05.987407Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:05.990531Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:05.991077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:05.991122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:05.991285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.991418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:05.991496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:05.991542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:05.991629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:05.991675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:05.991701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:05.991724Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:05.991921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.992016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:05.992062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:05.992093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:05.992188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:05.992245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:05.992289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:05.992319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:05.992370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:05.992407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:05.992473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:05.992561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:05.992601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:05.992706Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:05.992966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:05.993026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:05.993058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:05.993192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:05.993258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.993292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.993341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:05.993382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:05.993433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:05.993485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:05.993527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... nished=1; 2026-01-07T17:51:36.726405Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:36.726435Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:36.726748Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.727012Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.727077Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:36.727313Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:36.727397Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-07T17:51:36.727748Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-07T17:51:36.728010Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.728206Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.728436Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.728725Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:36.728964Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.729184Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.729555Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:759:2739] finished for tablet 9437184 2026-01-07T17:51:36.730201Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:758:2738];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":31254014,"name":"_full_task","f":31254014,"d_finished":0,"c":0,"l":31264533,"d":10519},"events":[{"name":"bootstrap","f":31254287,"d_finished":1674,"c":1,"l":31255961,"d":1674},{"a":31263581,"name":"ack","f":31261599,"d_finished":1793,"c":1,"l":31263392,"d":2745},{"a":31263555,"name":"processing","f":31256231,"d_finished":4271,"c":3,"l":31263397,"d":5249},{"name":"ProduceResults","f":31255351,"d_finished":3078,"c":6,"l":31264110,"d":3078},{"a":31264119,"name":"Finish","f":31264119,"d_finished":0,"c":0,"l":31264533,"d":414},{"name":"task_result","f":31256257,"d_finished":2394,"c":2,"l":31261342,"d":2394}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.730336Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:36.730968Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:758:2738];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":31254014,"name":"_full_task","f":31254014,"d_finished":0,"c":0,"l":31265285,"d":11271},"events":[{"name":"bootstrap","f":31254287,"d_finished":1674,"c":1,"l":31255961,"d":1674},{"a":31263581,"name":"ack","f":31261599,"d_finished":1793,"c":1,"l":31263392,"d":3497},{"a":31263555,"name":"processing","f":31256231,"d_finished":4271,"c":3,"l":31263397,"d":6001},{"name":"ProduceResults","f":31255351,"d_finished":3078,"c":6,"l":31264110,"d":3078},{"a":31264119,"name":"Finish","f":31264119,"d_finished":0,"c":0,"l":31265285,"d":1166},{"name":"task_result","f":31256257,"d_finished":2394,"c":2,"l":31261342,"d":2394}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:36.731078Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:36.717143Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59288;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59288;selected_rows=0; 2026-01-07T17:51:36.731135Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:36.731385Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2026-01-07T17:51:37.164020Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# B3A11A42-2515-4EB1-9EC3-7E55D8178E93, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:8820 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FC43D409-61B4-4687-B124-4B1D66B071C4 
amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-07T17:51:37.170779Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# B3A11A42-2515-4EB1-9EC3-7E55D8178E93, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-07T17:51:37.171103Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1C69DC7D-E4CA-47AB-AC92-8BB2F79B525D, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8820 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AADECEE6-ECC7-4375-99F2-DF3F097160DE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2026-01-07T17:51:37.174526Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1C69DC7D-E4CA-47AB-AC92-8BB2F79B525D, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-07T17:51:37.174844Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 2C869E95-452C-4F76-BBC2-9C08043D4748, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8820 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 305876A8-541B-484A-9BF9-1FF7BA48C98B amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2026-01-07T17:51:37.180682Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 2C869E95-452C-4F76-BBC2-9C08043D4748, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-07T17:51:37.180904Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# E3C278F2-118E-4FE6-96E0-56B289A28402, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:8820 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE9C8EC9-F9C2-492B-AF0F-079376E38F45 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-07T17:51:37.182972Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# E3C278F2-118E-4FE6-96E0-56B289A28402, response# GetObjectResult { } >> TS3WrapperTests::PutObject [GOOD] |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2026-01-07T17:50:09.613860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:50:09.649108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:50:09.649328Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:50:09.685300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:50:09.685549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:50:09.685812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:50:09.685938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:50:09.686054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:50:09.686170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:50:09.686294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:50:09.686402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:50:09.686534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:50:09.686646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.686758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:50:09.686883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:50:09.686977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:50:09.726451Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:50:09.727094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:50:09.727154Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:50:09.727372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.729823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:50:09.729907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:50:09.729967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:50:09.730334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:50:09.730491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:50:09.730540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:50:09.730760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:50:09.730823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:50:09.730861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:50:09.730893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:50:09.730986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:50:09.731043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:50:09.731114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:50:09.731143Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:50:09.731190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:50:09.731224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:50:09.731259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:50:09.731298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:50:09.731351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731377Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:50:09.731622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:50:09.731679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:50:09.731706Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:50:09.731815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:50:09.731850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:50:09.731921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:50:09.731950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:50:09.731990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:50:09.732026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:50:09.732050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:50:09.732070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:50:09.732167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:50:09.732202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... E:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=3958; 2026-01-07T17:51:34.707796Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-07T17:51:34.708514Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=662; 2026-01-07T17:51:34.708571Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=4914; 2026-01-07T17:51:34.708607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5041; 2026-01-07T17:51:34.708662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-07T17:51:34.708727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=32; 2026-01-07T17:51:34.708755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5530; 
2026-01-07T17:51:34.708866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=70; 2026-01-07T17:51:34.708983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=84; 2026-01-07T17:51:34.709105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=88; 2026-01-07T17:51:34.709208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=76; 2026-01-07T17:51:34.709579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=338; 2026-01-07T17:51:34.710095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=474; 2026-01-07T17:51:34.710139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2026-01-07T17:51:34.710171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-01-07T17:51:34.710197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:51:34.710250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2026-01-07T17:51:34.710292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:51:34.710350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=36; 2026-01-07T17:51:34.710380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-07T17:51:34.710428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2026-01-07T17:51:34.710504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2026-01-07T17:51:34.710563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=36; 2026-01-07T17:51:34.710590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=14472; 2026-01-07T17:51:34.710711Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:34.710793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:34.710846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:34.710907Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:34.710947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-07T17:51:34.711047Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:34.711098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:51:34.711127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:51:34.711159Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:51:34.711204Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:34.711252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:34.711286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:34.711357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:34.711499Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.022000s; 2026-01-07T17:51:34.712737Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:34.713345Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:34.713388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:34.713437Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-07T17:51:34.713470Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:51:34.713510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:51:34.713583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:34.713630Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:34.713665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:34.713721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-07T17:51:34.713760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:34.714054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.015000s; 2026-01-07T17:51:34.714089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2026-01-07T17:51:37.370546Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# AC800025-9CBA-4A16-B9F2-756C9E1691CF, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:11691 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4234F805-1C9C-48F7-8FAA-9C4934F65A0A amz-sdk-request: attempt=1 
content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-07T17:51:37.374861Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# AC800025-9CBA-4A16-B9F2-756C9E1691CF, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-07T17:51:37.375139Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 2EF23815-FC13-4261-8113-D22BA3480D88, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:11691 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DC93FC95-A084-4105-8EDC-20835BE0C64A amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2026-01-07T17:51:37.377515Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 2EF23815-FC13-4261-8113-D22BA3480D88, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2026-01-07T17:51:37.377727Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 99471013-F8D0-4B80-829D-B8EF5710C41E, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:11691 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 218B0D52-85E6-4ECF-B283-2A1C69574A60 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2026-01-07T17:51:37.381746Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 99471013-F8D0-4B80-829D-B8EF5710C41E, response# UploadPartCopyResult { } 2026-01-07T17:51:37.381997Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 350CEA5A-2AA0-448C-8A0D-5CA0427520D9, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:11691 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4B73C8AE-CA59-4F1F-925E-B4C02319440C amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2026-01-07T17:51:37.384703Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 350CEA5A-2AA0-448C-8A0D-5CA0427520D9, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2026-01-07T17:51:37.384936Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 95458E2A-1860-4495-9C42-4727943AAB85, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:11691 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8904F08B-4C18-4C86-B72E-9FD340984FB2 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2026-01-07T17:51:37.386969Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 95458E2A-1860-4495-9C42-4727943AAB85, response# GetObjectResult { } >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2026-01-07T17:51:37.609714Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 61B1E1DE-EFE6-432C-9A45-0DBA8031D3FE, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:65465 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 93B4247B-48BC-4D74-99B1-49639152DA64 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-07T17:51:37.613352Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 61B1E1DE-EFE6-432C-9A45-0DBA8031D3FE, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload |94.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:48:57.117664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:48:57.117752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:57.117786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:48:57.117809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:48:57.117838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-07T17:48:57.117859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:48:57.117892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:48:57.117985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:48:57.118641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:48:57.121721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:48:57.239870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:48:57.239930Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:48:57.254698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:48:57.254994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:48:57.255163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:48:57.261048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:48:57.261345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:48:57.261990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:48:57.265073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:48:57.272531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:57.273967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:48:57.280150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:48:57.280233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:48:57.280371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:48:57.280422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:48:57.280521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:48:57.281231Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:48:57.445164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.446927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.447010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.447077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.447162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:48:57.447225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... FO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-07T17:51:37.698816Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 5 2026-01-07T17:51:37.698876Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/5, is published: true 2026-01-07T17:51:37.700064Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-07T17:51:37.700131Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:51:37.700443Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-07T17:51:37.700617Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 4/5 2026-01-07T17:51:37.700665Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2026-01-07T17:51:37.700710Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 4/5 2026-01-07T17:51:37.700763Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2026-01-07T17:51:37.700818Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2026-01-07T17:51:37.701054Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:51:37.701084Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:51:37.701260Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-07T17:51:37.701327Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-07T17:51:37.701346Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-07T17:51:37.701374Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-07T17:51:37.701398Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-07T17:51:37.701427Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2026-01-07T17:51:37.701494Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [20:780:2746] message: TxId: 
103 2026-01-07T17:51:37.701557Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-07T17:51:37.701626Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:51:37.701681Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:51:37.701792Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:51:37.701845Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-07T17:51:37.701863Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-07T17:51:37.701885Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:51:37.701903Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-07T17:51:37.701920Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-07T17:51:37.701949Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-07T17:51:37.701969Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:3 2026-01-07T17:51:37.701985Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:3 2026-01-07T17:51:37.702008Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:51:37.702025Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:4 2026-01-07T17:51:37.702043Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:4 2026-01-07T17:51:37.702081Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-07T17:51:37.702515Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:51:37.702585Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-07T17:51:37.702669Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:51:37.702732Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:51:37.702759Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:51:37.703615Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.703859Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.704536Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.704589Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.704622Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.706110Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-07T17:51:37.706317Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:51:37.706372Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [20:1128:3027] 2026-01-07T17:51:37.706513Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2026-01-07T17:51:37.707046Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:51:37.707367Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 353us result status StatusPathDoesNotExist 2026-01-07T17:51:37.707547Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:51:37.708002Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:51:37.708278Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 303us result status StatusPathDoesNotExist 2026-01-07T17:51:37.708409Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2026-01-07T17:51:38.536094Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 242DE95F-E27D-4848-ADA6-D3B230178502, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:6667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B1319B47-4CEF-4B3C-83A7-A324883EB368 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-07T17:51:38.540398Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 242DE95F-E27D-4848-ADA6-D3B230178502, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-07T17:51:38.540607Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 79E7650E-FD93-4654-A131-120B1B784447, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:6667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 74CEA818-E77F-4EF6-A6AD-2D568BA572EE amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2026-01-07T17:51:38.542760Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 79E7650E-FD93-4654-A131-120B1B784447, response# AbortMultipartUploadResult { } 2026-01-07T17:51:38.542952Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 6FDA8E95-6143-463D-8CB7-709723B8F313, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:6667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: 
h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F8994D50-74E3-40CB-B172-C9D8A11208BF amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-07T17:51:38.544790Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 6FDA8E95-6143-463D-8CB7-709723B8F313, response# No response body. |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> TS3WrapperTests::AbortUnknownUpload [GOOD] |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2026-01-07T17:51:39.760184Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 58EA8B8B-FAA1-40E2-BD43-E5E54EB6ACCC, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:2360 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 71E19078-E91D-49D6-B194-5BECB477524E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2026-01-07T17:51:39.764381Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 58EA8B8B-FAA1-40E2-BD43-E5E54EB6ACCC, response# |94.9%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] >> TColumnShardTestSchema::ExportAfterFail [GOOD] |94.9%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:00.985176Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:01.012992Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:01.021171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.054926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.055793Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.081070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.081399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.081647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.081767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.081889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.081992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.082112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.082264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.082374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.082480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.082619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.082721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.082854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:01.085911Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:01.115639Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:01.115884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:01.115957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:01.116153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:01.116374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:01.116433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:01.116579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:01.116669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:01.116717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:01.116747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:01.116908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks 
found; 2026-01-07T17:51:01.117054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:01.117097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:01.117127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:01.117243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:01.117308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:01.117348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:01.117381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:01.117428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:01.117471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:01.117503Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:01.117569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:01.117620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:01.117657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:01.117877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:01.117934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:01.117967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:01.118106Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:01.118185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.118219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.118274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:01.118313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:01.118348Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:01.118391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:01.118430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... nished=1; 2026-01-07T17:51:40.227497Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:40.227534Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:40.227848Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:40.228105Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.228176Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:40.228381Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:40.228458Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-07T17:51:40.228782Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-07T17:51:40.229031Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.229221Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.229445Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.229663Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:40.229858Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.230046Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.230402Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:513:2516] finished for tablet 9437184 2026-01-07T17:51:40.231010Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:512:2515];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":40236197,"name":"_full_task","f":40236197,"d_finished":0,"c":0,"l":40245509,"d":9312},"events":[{"name":"bootstrap","f":40236466,"d_finished":1487,"c":1,"l":40237953,"d":1487},{"a":40244666,"name":"ack","f":40242847,"d_finished":1666,"c":1,"l":40244513,"d":2509},{"a":40244645,"name":"processing","f":40238107,"d_finished":3869,"c":3,"l":40244517,"d":4733},{"name":"ProduceResults","f":40237392,"d_finished":2842,"c":6,"l":40245105,"d":2842},{"a":40245112,"name":"Finish","f":40245112,"d_finished":0,"c":0,"l":40245509,"d":397},{"name":"task_result","f":40238130,"d_finished":2132,"c":2,"l":40242584,"d":2132}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.231113Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:40.231673Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:512:2515];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":40236197,"name":"_full_task","f":40236197,"d_finished":0,"c":0,"l":40246192,"d":9995},"events":[{"name":"bootstrap","f":40236466,"d_finished":1487,"c":1,"l":40237953,"d":1487},{"a":40244666,"name":"ack","f":40242847,"d_finished":1666,"c":1,"l":40244513,"d":3192},{"a":40244645,"name":"processing","f":40238107,"d_finished":3869,"c":3,"l":40244517,"d":5416},{"name":"ProduceResults","f":40237392,"d_finished":2842,"c":6,"l":40245105,"d":2842},{"a":40245112,"name":"Finish","f":40245112,"d_finished":0,"c":0,"l":40246192,"d":1080},{"name":"task_result","f":40238130,"d_finished":2132,"c":2,"l":40242584,"d":2132}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.231765Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:40.219345Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-07T17:51:40.231842Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:40.232030Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807660.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-07T17:51:02.081098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:02.106132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:02.106351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:02.112111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:02.112316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:02.112493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:02.112573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:02.112657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:02.112747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:02.112827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:02.112896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:02.113014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2026-01-07T17:51:02.113091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.113179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:02.113274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:02.113388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:02.138172Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:02.141325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:02.141439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:02.141644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.141877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:02.141970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:02.142024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:02.142132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:02.142227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:02.142281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:02.142310Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:02.142458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.142511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:02.142540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:02.142563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:02.142640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:02.142699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:02.142742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:02.142764Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:02.142804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:02.142839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:02.142868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:02.142909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:02.142949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:02.142975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:02.143127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:02.143160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:02.143181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:02.143265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:02.143294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.143314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.143363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:02.143397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:02.143420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:02.143452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... X_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:40.496964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.496993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:40.497068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=80000; 2026-01-07T17:51:40.497130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=80000;batch_columns=timestamp; 2026-01-07T17:51:40.497299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 2026-01-07T17:51:40.497378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.497449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.497528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.497611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:40.497664Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.497744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.497877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:833:2802] finished for tablet 9437184 2026-01-07T17:51:40.498113Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:832:2801];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":40514227,"name":"_full_task","f":40514227,"d_finished":0,"c":0,"l":40524862,"d":10635},"events":[{"name":"bootstrap","f":40514363,"d_finished":681,"c":1,"l":40515044,"d":681},{"a":40524553,"name":"ack","f":40522738,"d_finished":1687,"c":2,"l":40524500,"d":1996},{"a":40524545,"name":"processing","f":40515145,"d_finished":4126,"c":5,"l":40524502,"d":4443},{"name":"ProduceResults","f":40514785,"d_finished":2414,"c":9,"l":40524708,"d":2414},{"a":40524711,"name":"Finish","f":40524711,"d_finished":0,"c":0,"l":40524862,"d":151},{"name":"task_result","f":40515154,"d_finished":2379,"c":3,"l":40522591,"d":2379}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:40.498174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:40.498459Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:832:2801];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":40514227,"name":"_full_task","f":40514227,"d_finished":0,"c":0,"l":40525160,"d":10933},"events":[{"name":"bootstrap","f":40514363,"d_finished":681,"c":1,"l":40515044,"d":681},{"a":40524553,"name":"ack","f":40522738,"d_finished":1687,"c":2,"l":40524500,"d":2294},{"a":40524545,"name":"processing","f":40515145,"d_finished":4126,"c":5,"l":40524502,"d":4741},{"name":"ProduceResults","f":40514785,"d_finished":2414,"c":9,"l":40524708,"d":2414},{"a":40524711,"name":"Finish","f":40524711,"d_finished":0,"c":0,"l":40525160,"d":449},{"name":"task_result","f":40515154,"d_finished":2379,"c":3,"l":40522591,"d":2379}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 2026-01-07T17:51:40.498530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:40.486262Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9739224;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9739224;selected_rows=0; 2026-01-07T17:51:40.498556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:40.498643Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestExternalBoot |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard >> HttpRequest::Probe >> ColumnStatistics::CountMinSketchStatistics |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808865.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808865.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808865.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808865.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808865.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808865.000000s;Name=;Codec=}; 
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=167808865.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808865.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807665.000000s;Name=;Codec=}; 2026-01-07T17:51:05.956912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:05.991000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:05.991242Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:05.998756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:05.999000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:05.999244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:05.999380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:05.999510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:05.999651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:05.999758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:05.999864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:06.000009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:06.000131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.000244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:06.000354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:06.000465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:06.031137Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:06.031780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:06.031853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:06.032040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.032198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:06.032286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:06.032334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:06.032433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:06.032516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:06.032573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:06.032613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:06.032790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.032857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:06.032898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:06.032944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:06.033046Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:06.033126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:06.033186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:06.033220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:06.033279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:06.033321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:06.033356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:06.033403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:06.033443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:06.033474Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:06.033661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:06.033733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:06.033776Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:06.033917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:06.033965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.033998Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.034048Z node 1 :TX ... 
nShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:41.656379Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:41.656518Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808302269:max} readable: {1767808302269:max} at tablet 9437184 2026-01-07T17:51:41.656601Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:41.656722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302269:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:41.656764Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302269:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:41.657152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302269:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:41.658234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302269:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:41.658913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302269:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1327:3296];trace_detailed=; 2026-01-07T17:51:41.659268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:41.659424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:41.659577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:41.659704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:41.659930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:41.660029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:41.660111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:41.660277Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1327:3296] finished for tablet 9437184 2026-01-07T17:51:41.660593Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1326:3295];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":39421391,"name":"_full_task","f":39421391,"d_finished":0,"c":0,"l":39422858,"d":1467},"events":[{"name":"bootstrap","f":39421602,"d_finished":662,"c":1,"l":39422264,"d":662},{"a":39422440,"name":"ack","f":39422440,"d_finished":0,"c":0,"l":39422858,"d":418},{"a":39422428,"name":"processing","f":39422428,"d_finished":0,"c":0,"l":39422858,"d":430},{"name":"ProduceResults","f":39422028,"d_finished":412,"c":2,"l":39422654,"d":412},{"a":39422660,"name":"Finish","f":39422660,"d_finished":0,"c":0,"l":39422858,"d":198}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:41.660651Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1326:3295];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:41.660953Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1326:3295];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":39421391,"name":"_full_task","f":39421391,"d_finished":0,"c":0,"l":39423208,"d":1817},"events":[{"name":"bootstrap","f":39421602,"d_finished":662,"c":1,"l":39422264,"d":662},{"a":39422440,"name":"ack","f":39422440,"d_finished":0,"c":0,"l":39423208,"d":768},{"a":39422428,"name":"processing","f":39422428,"d_finished":0,"c":0,"l":39423208,"d":780},{"name":"ProduceResults","f":39422028,"d_finished":412,"c":2,"l":39422654,"d":412},{"a":39422660,"name":"Finish","f":39422660,"d_finished":0,"c":0,"l":39423208,"d":548}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1327:3296]->[1:1326:3295] 2026-01-07T17:51:41.661021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:41.658209Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:41.661054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:41.661140Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> BasicStatistics::TwoServerlessDbs |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808861.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808861.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807661.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-07T17:51:03.662959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:03.695280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:03.695700Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at 
tablet 9437184 2026-01-07T17:51:03.703479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:03.703745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:03.703989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:03.704108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:03.704228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:03.704366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:03.704474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:03.704573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:03.704711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:03.704826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.704954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:03.705059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:03.705175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:03.736074Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:03.736741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:03.736857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:03.737085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:03.737266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:03.737393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:03.737445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:03.737550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:03.737647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:03.737722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:03.737759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:03.737940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:03.738001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:03.738043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:03.738073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:03.738176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:03.738247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:03.738306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:03.738342Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:03.738415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:03.738455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:03.738484Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:03.738536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:03.738576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:03.738607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:03.738809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:03.738877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:03.738914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:03.739060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:03.739107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.739140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:03.739223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:03.739274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:03.739314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:51:03.739361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... E:granule/portions;fline=constructor_portion.cpp:44;memory_size=286;data_size=260;sum=5688;count=20;size_of_portion=192; 2026-01-07T17:51:41.827293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2044; 2026-01-07T17:51:41.827336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2026-01-07T17:51:41.827798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=432; 2026-01-07T17:51:41.827828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2674; 2026-01-07T17:51:41.827853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2750; 2026-01-07T17:51:41.827889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2026-01-07T17:51:41.827931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=13; 2026-01-07T17:51:41.827951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3147; 2026-01-07T17:51:41.828034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=52; 2026-01-07T17:51:41.828104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=40; 2026-01-07T17:51:41.828182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=51; 2026-01-07T17:51:41.828243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=38; 2026-01-07T17:51:41.829459Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1185; 2026-01-07T17:51:41.830675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1160; 2026-01-07T17:51:41.830768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-07T17:51:41.830819Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-07T17:51:41.830873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:51:41.830944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2026-01-07T17:51:41.830978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-07T17:51:41.831051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-07T17:51:41.831086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:51:41.831146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-07T17:51:41.831230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=40; 2026-01-07T17:51:41.831440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=168; 2026-01-07T17:51:41.831476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12390; 2026-01-07T17:51:41.831582Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:41.831733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:41.831773Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:41.831826Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:41.837929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:51:41.838091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:41.838155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:51:41.838216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:41.838256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:41.838283Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:41.838368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:41.838534Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.175000s; 2026-01-07T17:51:41.840428Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:41.840659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:41.840703Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:41.840779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-07T17:51:41.840830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:41.840861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:41.840889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:41.840991Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:41.841326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.021000s; 2026-01-07T17:51:41.841358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics >> BasicStatistics::TwoTables |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] >> THiveTest::TestExternalBootCounters >> BasicStatistics::Simple >> BasicStatistics::TwoNodes |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals >> THiveTest::TestExternalBootCounters [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned |95.0%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::ProbeBaseStats >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> ColumnStatistics::EqWidthHistogramSmallParamTypes >> BasicStatistics::SimpleGlobalIndex >> YdbTableSplit::RenameTablesAndSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] Test command err: 2026-01-07T17:50:37.081541Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.106510Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.106769Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.107552Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.107878Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.109019Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:37.109085Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.109909Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:74:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.109949Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.110061Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.110195Z node 1 :BS_NODE DEBUG: 
{NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.122411Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.122466Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.124516Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.124659Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.124734Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.124819Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.124888Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.125044Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.125169Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:88:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.125187Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.125245Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:74:2077] 2026-01-07T17:50:37.125275Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:74:2077] 2026-01-07T17:50:37.125342Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.125383Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.126175Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.126254Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.128345Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.128453Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.128730Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.128914Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 
HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.129640Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:99:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.129661Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.129701Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.129789Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.137969Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.138022Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.139617Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:106:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.139781Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:107:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.139917Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:108:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.140037Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:109:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.140171Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:110:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.140286Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:111:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.140426Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:112:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.140452Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.140503Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:99:2077] 2026-01-07T17:50:37.140528Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:99:2077] 2026-01-07T17:50:37.140571Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.140602Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.141348Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.159222Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:74:2077] 2026-01-07T17:50:37.159367Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.160819Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.161137Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] 
queue send [2:99:2077] 2026-01-07T17:50:37.161171Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.161364Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.161408Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.163833Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.163926Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.164175Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.164331Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.165454Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:127:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.165479Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.165524Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.165591Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.167094Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.167124Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.167433Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37. ... 
42: [cd7cbca5baaa92c4] Id# [72057594037927937:2:13:0:0:105:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:51:45.035609Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [cd7cbca5baaa92c4] restore Id# [72057594037927937:2:13:0:0:105:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:51:45.035710Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [cd7cbca5baaa92c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG33 2026-01-07T17:51:45.035787Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [cd7cbca5baaa92c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG32 2026-01-07T17:51:45.035994Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:81:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:13:0:0:105:1] FDS# 105 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:51:45.037254Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd7cbca5baaa92c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:13:0:0:105:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 28 } Cost# 80826 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 29 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-07T17:51:45.037420Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd7cbca5baaa92c4] Result# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:51:45.037525Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd7cbca5baaa92c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:51:45.037757Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:51:45.037923Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} commited cookie 1 for step 13 2026-01-07T17:51:45.038129Z node 68 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [68:483:2316] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [68:483:2316] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2026-01-07T17:51:45.038717Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:510:2342] 2026-01-07T17:51:45.038781Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:510:2342] 2026-01-07T17:51:45.038883Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:45.038954Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 
72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-07T17:51:45.039044Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:510:2342] 2026-01-07T17:51:45.039110Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:510:2342] 2026-01-07T17:51:45.039164Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:510:2342] 2026-01-07T17:51:45.039226Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:510:2342] 2026-01-07T17:51:45.039352Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:510:2342] 2026-01-07T17:51:45.039582Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:510:2342] 2026-01-07T17:51:45.039655Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:510:2342] 2026-01-07T17:51:45.039712Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:510:2342] 2026-01-07T17:51:45.039791Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:510:2342] 2026-01-07T17:51:45.039839Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:510:2342] 2026-01-07T17:51:45.039908Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:483:2316] EventType# 268697612 2026-01-07T17:51:45.040117Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2026-01-07T17:51:45.040188Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:51:45.040506Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{22, redo 182b alter 0b annex 0, ~{ 1, 4 } -{ }, 0 gb} 2026-01-07T17:51:45.040586Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:51:45.040841Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-07T17:51:45.040916Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:51:45.041181Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{23, redo 514b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 2026-01-07T17:51:45.041270Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:51:45.052563Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [667a4d0d3c364da9] bootstrap ActorId# [68:513:2345] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:14:0:0:321:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# 
BPP13 2026-01-07T17:51:45.052783Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [667a4d0d3c364da9] Id# [72057594037927937:2:14:0:0:321:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:51:45.052858Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [667a4d0d3c364da9] restore Id# [72057594037927937:2:14:0:0:321:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:51:45.052965Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [667a4d0d3c364da9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG33 2026-01-07T17:51:45.053032Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [667a4d0d3c364da9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG32 2026-01-07T17:51:45.053221Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:81:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:14:0:0:321:1] FDS# 321 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:51:45.054493Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [667a4d0d3c364da9] received {EvVPutResult Status# OK ID# [72057594037927937:2:14:0:0:321:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 29 } Cost# 82527 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 30 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-07T17:51:45.054651Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [667a4d0d3c364da9] Result# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:51:45.054734Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [667a4d0d3c364da9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:51:45.054960Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:51:45.055101Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} commited cookie 1 for step 14 2026-01-07T17:51:45.055644Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:515:2347] 2026-01-07T17:51:45.055697Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:515:2347] 2026-01-07T17:51:45.055828Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:45.055896Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-07T17:51:45.055980Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:515:2347] 2026-01-07T17:51:45.056052Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:515:2347] 
2026-01-07T17:51:45.056133Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:515:2347] 2026-01-07T17:51:45.056202Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:515:2347] 2026-01-07T17:51:45.056314Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:515:2347] 2026-01-07T17:51:45.056429Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:515:2347] 2026-01-07T17:51:45.056510Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:515:2347] 2026-01-07T17:51:45.056563Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:515:2347] 2026-01-07T17:51:45.056635Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:515:2347] 2026-01-07T17:51:45.056684Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:515:2347] 2026-01-07T17:51:45.056748Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:514:2346] EventType# 268830214 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2026-01-07T17:50:13.506359Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678460714463934:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:13.506666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:50:13.689040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:50:13.887350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:50:13.887444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:50:13.927369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:50:14.094641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:50:14.121108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:50:14.156955Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007517s 2026-01-07T17:50:14.518698Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:50:14.545565Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:50:14.545588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:50:14.545662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:50:14.545735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:50:15.087589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:50:16.947887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678473599367085:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.948014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.948475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678473599367094:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:16.948604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.531245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/Dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.531471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.531587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976710658:1, at schemeshard: 72057594046644480 2026-01-07T17:50:17.534091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.534127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:50:17.536126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2026-01-07T17:50:17.621177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808217665, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:17.671284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:0 2026-01-07T17:50:17.671337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:1 2026-01-07T17:50:17.697438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678477894334636:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.697537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.698755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678477894334639:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.698857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:50:17.742107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2026-01-07T17:50:17.742595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:17.742632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:50:17.744257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2026-01-07T17:50:17.754410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767808217798, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T17:50:17.760721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2026-01-07T17:50:18.498802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678460714463934:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:50:18.498877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2026-01-07T17:50:27.790075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-07T17:50:27.795366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-07T17:50:27.795439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:27.875222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts 
is done, operation id: 281474976715693:0 2026-01-07T17:50:27.887117Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-07T17:50:27.887156Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-07T17:50:28.810439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:50:28.810464Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded partitions 1 2026-01-07T17:50:29.870129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_move_table.cpp:668: TMoveTable Propose, from: /Root/Dir/Foo, to: /Root/Dir/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-07T17:50:29.872603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T17:50:29.877869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Dir ... son remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 5 2026-01-07T17:51:46.321326Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037892 parts [ [72075186224037890:1:114:1:12288:10704:0] ] return ack processed 2026-01-07T17:51:46.321371Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:51:46.321393Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:91: 72075186224037890 ack parts [ [72075186224037890:1:114:1:12288:10704:0] ] return to tablet 72075186224037891 2026-01-07T17:51:46.321464Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:46.321475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710664:0 progress is 1/1 2026-01-07T17:51:46.321488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2026-01-07T17:51:46.321501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710664:0 progress is 1/1 2026-01-07T17:51:46.321510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2026-01-07T17:51:46.321520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710664, ready parts: 1/1, is published: true 2026-01-07T17:51:46.321595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7592678860146428157:2822] message: TxId: 281474976710664 2026-01-07T17:51:46.321617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2026-01-07T17:51:46.321637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 
281474976710664:0 2026-01-07T17:51:46.321660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710664:0 2026-01-07T17:51:46.321687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7592678860146428221:2826], serverId# [1:7592678860146428224:5125], sessionId# [0:0:0] 2026-01-07T17:51:46.321733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-07T17:51:46.322869Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:46.323555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592678520844008017 RawX2: 4503603922340185 } TabletId: 72075186224037890 State: 4 2026-01-07T17:51:46.323602Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-07T17:51:46.323641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:46.323677Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-07T17:51:46.324272Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:46.324518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592678842966558550 RawX2: 4503603922340577 } TabletId: 72075186224037892 State: 4 2026-01-07T17:51:46.324548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:46.324797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-07T17:51:46.324798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:51:46.324857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:51:46.325207Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037891 parts [ [72075186224037890:1:114:1:12288:10704:0] ] return ack processed 2026-01-07T17:51:46.325302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7592678860146428222:2827], serverId# [1:7592678860146428229:5130], sessionId# [0:0:0] 2026-01-07T17:51:46.325598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:51:46.325607Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle 
TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-07T17:51:46.325624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:51:46.326424Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-07T17:51:46.326614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7592678842966558549 RawX2: 4503603922340576 } TabletId: 72075186224037891 State: 4 2026-01-07T17:51:46.326674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-07T17:51:46.327082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-07T17:51:46.327089Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-07T17:51:46.327271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-07T17:51:46.327389Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-07T17:51:46.327494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-07T17:51:46.327551Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-07T17:51:46.327610Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-07T17:51:46.327642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-07T17:51:46.328029Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-07T17:51:46.328096Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-07T17:51:46.328109Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-07T17:51:46.328123Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-07T17:51:46.329120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-07T17:51:46.329131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-07T17:51:46.329161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-07T17:51:46.329467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 
2026-01-07T17:51:46.329485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-07T17:51:46.329504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-07T17:51:46.329516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-07T17:51:46.331637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-07T17:51:46.331733Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-07T17:51:46.331775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-07T17:51:46.331967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T17:51:46.331995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-07T17:51:46.332046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-07T17:51:46.332053Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-07T17:51:46.332388Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-07T17:51:46.332453Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-07T17:51:46.333719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-07T17:51:46.333744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-07T17:51:46.333771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |95.0%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807660.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807660.000000s;Name=;Codec=}; 2026-01-07T17:51:01.990927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:02.013840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:02.014047Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:02.020418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:02.020649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:02.020871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:02.021010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:02.021141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:02.021264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:02.021359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:02.021463Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:02.021606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:02.021730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.021827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:02.021917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:02.022024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:02.041239Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:02.044311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:02.044416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:02.044628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.044810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:02.044901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:02.044970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:02.045084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:02.045165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:02.045219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:02.045288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:02.045467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.045538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:02.045585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:02.045616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:02.045720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:02.045794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:02.045852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:02.045893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:02.045952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:02.045991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:02.046022Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:02.046068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:02.046108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:02.046143Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:02.046366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:02.046425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:02.046460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:02.046603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:02.046647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.046682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.046739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:02.046782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:02.046815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:02.046864Z node 1 :TX_COLUM ... ;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:50.088917Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:50.089537Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-07T17:51:50.089708Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808298142:max} readable: {1767808298142:max} at tablet 9437184 2026-01-07T17:51:50.089802Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:50.089922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808298142:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:50.089960Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808298142:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:50.090290Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808298142:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:50.091337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808298142:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:50.091924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808298142:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-07T17:51:50.092208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:50.092333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:50.092494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:50.092624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:50.092791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:50.092878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:50.092982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:50.093144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-07T17:51:50.093489Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":50169578,"name":"_full_task","f":50169578,"d_finished":0,"c":0,"l":50170906,"d":1328},"events":[{"name":"bootstrap","f":50169739,"d_finished":624,"c":1,"l":50170363,"d":624},{"a":50170490,"name":"ack","f":50170490,"d_finished":0,"c":0,"l":50170906,"d":416},{"a":50170473,"name":"processing","f":50170473,"d_finished":0,"c":0,"l":50170906,"d":433},{"name":"ProduceResults","f":50170130,"d_finished":419,"c":2,"l":50170711,"d":419},{"a":50170715,"name":"Finish","f":50170715,"d_finished":0,"c":0,"l":50170906,"d":191}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:50.093549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:50.093810Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":50169578,"name":"_full_task","f":50169578,"d_finished":0,"c":0,"l":50171298,"d":1720},"events":[{"name":"bootstrap","f":50169739,"d_finished":624,"c":1,"l":50170363,"d":624},{"a":50170490,"name":"ack","f":50170490,"d_finished":0,"c":0,"l":50171298,"d":808},{"a":50170473,"name":"processing","f":50170473,"d_finished":0,"c":0,"l":50171298,"d":825},{"name":"ProduceResults","f":50170130,"d_finished":419,"c":2,"l":50170711,"d":419},{"a":50170715,"name":"Finish","f":50170715,"d_finished":0,"c":0,"l":50171298,"d":583}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-07T17:51:50.093879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:50.091321Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:50.093921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:50.094008Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboPg::TestCrossInnerJoin-ColumnStore >> KqpRboYql::OlapPredicatePushdown >> KqpRboPg::Bench_Select >> 
KqpRboPg::ConstantFolding >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2026-01-07T17:50:36.995715Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.023397Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.023687Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.024485Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.024854Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.025871Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:37.025928Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.026825Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:74:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.026868Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.026963Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.027074Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.039527Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.039573Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.041160Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041255Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041327Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041398Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041479Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041555Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041627Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:73:2076] Create Queue# [1:88:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.041646Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.041707Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:74:2077] 2026-01-07T17:50:37.041731Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:74:2077] 2026-01-07T17:50:37.041778Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.041816Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.042464Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.042525Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.045529Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.045706Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.046014Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.046270Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.047148Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:99:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.047184Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.047243Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.047352Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.057530Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.057596Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.059355Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:106:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.059524Z node 2 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:107:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.059667Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:108:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.059786Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:109:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.059920Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:110:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.060031Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:111:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.060193Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:98:2076] Create Queue# [2:112:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.060222Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.060291Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:99:2077] 2026-01-07T17:50:37.060318Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:99:2077] 2026-01-07T17:50:37.060371Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.060410Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.061348Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.080307Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:74:2077] 2026-01-07T17:50:37.080495Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.082414Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.085760Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:99:2077] 2026-01-07T17:50:37.085840Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.086158Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.086244Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.089258Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 
PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.089524Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.089897Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.090186Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.091111Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:127:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.091139Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.091182Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.091260Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.093243Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.093298Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.093728Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37. ... BUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1972:2267] 2026-01-07T17:51:49.584490Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037892] forward result remote node 67 [62:2101:2491] 2026-01-07T17:51:49.584552Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [62:2101:2491] 2026-01-07T17:51:49.584574Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037892]::SendEvent [62:2101:2491] 2026-01-07T17:51:49.584725Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [62:2101:2491] 2026-01-07T17:51:49.584960Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [62:2101:2491] 2026-01-07T17:51:49.584983Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [62:2101:2491] 2026-01-07T17:51:49.585451Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [62:2105:2493] 2026-01-07T17:51:49.585474Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [62:2105:2493] 2026-01-07T17:51:49.585508Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [67:1310:2100] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:49.585534Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1310:2100] 
2026-01-07T17:51:49.585606Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 67 [62:2105:2493] 2026-01-07T17:51:49.585684Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [62:2105:2493] 2026-01-07T17:51:49.585710Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037893]::SendEvent [62:2105:2493] 2026-01-07T17:51:49.585814Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [62:2105:2493] 2026-01-07T17:51:49.585955Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [62:2105:2493] 2026-01-07T17:51:49.585979Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [62:2105:2493] 2026-01-07T17:51:49.586466Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [62:2108:2495] 2026-01-07T17:51:49.586489Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [62:2108:2495] 2026-01-07T17:51:49.586521Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [66:1313:2141] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:49.586543Z node 62 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:51:49.586628Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:51:49.586689Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2026-01-07T17:51:49.586720Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2026-01-07T17:51:49.586743Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2026-01-07T17:51:49.586776Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1973:2268] CurrentLeaderTablet: [67:1980:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-01-07T17:51:49.586827Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1973:2268] CurrentLeaderTablet: [67:1980:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-01-07T17:51:49.586861Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037894 leader: [67:1973:2268] followers: 0 2026-01-07T17:51:49.586905Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1973:2268] 2026-01-07T17:51:49.586955Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 67 [62:2108:2495] 
2026-01-07T17:51:49.587017Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [62:2108:2495] 2026-01-07T17:51:49.587040Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037894]::SendEvent [62:2108:2495] 2026-01-07T17:51:49.587193Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [62:2108:2495] 2026-01-07T17:51:49.587415Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [62:2108:2495] 2026-01-07T17:51:49.587440Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [62:2108:2495] 2026-01-07T17:51:49.587892Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [62:2112:2497] 2026-01-07T17:51:49.587913Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [62:2112:2497] 2026-01-07T17:51:49.587946Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [67:1820:2195] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:49.587971Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1820:2195] 2026-01-07T17:51:49.588020Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote node 67 [62:2112:2497] 2026-01-07T17:51:49.588068Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [62:2112:2497] 2026-01-07T17:51:49.588091Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037895]::SendEvent [62:2112:2497] 2026-01-07T17:51:49.588188Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [62:2112:2497] 2026-01-07T17:51:49.588357Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [62:2112:2497] 2026-01-07T17:51:49.588388Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [62:2112:2497] 2026-01-07T17:51:49.588837Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [62:2115:2499] 2026-01-07T17:51:49.588857Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [62:2115:2499] 2026-01-07T17:51:49.588889Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [67:1823:2197] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:49.588914Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1823:2197] 2026-01-07T17:51:49.588973Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 67 [62:2115:2499] 2026-01-07T17:51:49.589025Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [62:2115:2499] 2026-01-07T17:51:49.589052Z node 62 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037896]::SendEvent [62:2115:2499] 2026-01-07T17:51:49.589170Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [62:2115:2499] 2026-01-07T17:51:49.589336Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [62:2115:2499] 2026-01-07T17:51:49.589361Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [62:2115:2499] 2026-01-07T17:51:49.589905Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [62:2117:2500] 2026-01-07T17:51:49.589953Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [62:2117:2500] 2026-01-07T17:51:49.590052Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [62:618:2179] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:51:49.590109Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [62:618:2179] 2026-01-07T17:51:49.590180Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [62:2117:2500] 2026-01-07T17:51:49.590250Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [62:2117:2500] 2026-01-07T17:51:49.590312Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [62:2117:2500] 2026-01-07T17:51:49.590382Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [62:2117:2500] 2026-01-07T17:51:49.590506Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [62:2117:2500] 2026-01-07T17:51:49.590716Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [62:2117:2500] 2026-01-07T17:51:49.590766Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [62:2117:2500] 2026-01-07T17:51:49.590815Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [62:2117:2500] 2026-01-07T17:51:49.590877Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [62:2117:2500] 2026-01-07T17:51:49.590925Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [62:2117:2500] 2026-01-07T17:51:49.590982Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [62:589:2174] EventType# 268697616 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808860.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807660.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807660.000000s;Name=;Codec=}; 2026-01-07T17:51:01.959681Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:01.989987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:01.990232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:01.997495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:01.997791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:01.998006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:01.998122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:01.998269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:01.998387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:01.998487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:01.998592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:01.998744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:01.998863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:01.998972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:01.999068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:01.999178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:02.028119Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:02.028778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:02.028843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:02.029032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.029184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:02.029280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:02.029327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:02.029418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:02.029482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:02.029532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:02.029562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:02.029717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.029789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:02.029830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:02.029857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:02.029965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:02.030029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:02.030070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:02.030103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:02.030167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:02.030203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:02.030231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:02.030297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:02.030339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:02.030367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:02.030561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:02.030639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:02.030678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:02.030798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:02.030838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.030867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.030918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:02.030958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:02.030992Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:02.031035Z node 1 :TX_COLUM ... hard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=6; 2026-01-07T17:51:50.811193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=55; 2026-01-07T17:51:50.811217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1814; 2026-01-07T17:51:50.811248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1894; 2026-01-07T17:51:50.811283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=6; 2026-01-07T17:51:50.811322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=15; 2026-01-07T17:51:50.811345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2310; 2026-01-07T17:51:50.811417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=39; 2026-01-07T17:51:50.811493Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=46; 2026-01-07T17:51:50.811579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=50; 2026-01-07T17:51:50.811647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=44; 2026-01-07T17:51:50.812717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1028; 2026-01-07T17:51:50.814135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1372; 2026-01-07T17:51:50.814207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-07T17:51:50.814244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-07T17:51:50.814269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-07T17:51:50.814319Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2026-01-07T17:51:50.814358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:51:50.814408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=30; 2026-01-07T17:51:50.814445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-07T17:51:50.814489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2026-01-07T17:51:50.814536Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=28; 2026-01-07T17:51:50.814696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=135; 2026-01-07T17:51:50.814726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11534; 2026-01-07T17:51:50.814810Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:50.814883Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:50.814920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:50.814966Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:50.820663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:51:50.820762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:50.820831Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:50.820889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807999487;tx_id=18446744073709551615;;current_snapshot_ts=1767808287374; 2026-01-07T17:51:50.820944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:50.820983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:50.821012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:50.821074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:50.821226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.086000s; 2026-01-07T17:51:50.822248Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:50.822949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:50.822990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:50.823056Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:50.823100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767807999487;tx_id=18446744073709551615;;current_snapshot_ts=1767808287374; 2026-01-07T17:51:50.823130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:50.823171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:50.823201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:50.823270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:50.823532Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.117000s; 2026-01-07T17:51:50.823563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 >> TColumnShardTestSchema::OneColdTier [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808871.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807671.000000s;Name=;Codec=}; 2026-01-07T17:51:11.919474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:11.950586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:11.950811Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:11.957668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:11.957894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:11.958105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:11.958235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:11.958378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:11.958509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:11.958611Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:11.958716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:11.958873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:11.958992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.959096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:11.959199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:11.959310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:11.990326Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:11.990966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:11.991036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:11.991200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.991358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:11.991467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:11.991508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:11.991618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:11.991688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:11.991731Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:11.991765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:11.991926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.991985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:11.992024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:11.992052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:11.992139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:11.992204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:11.992256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:11.992286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:11.992342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:11.992378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:11.992408Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:11.992458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:11.992492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:11.992518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:11.992690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:11.992739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:11.992769Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:11.992884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:11.992946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.992977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.993021Z node 1 :TX ... nLoadingTime=15; 2026-01-07T17:51:51.021027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=114; 2026-01-07T17:51:51.021066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7113; 2026-01-07T17:51:51.021107Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7219; 2026-01-07T17:51:51.021165Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-07T17:51:51.021244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=34; 2026-01-07T17:51:51.021277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7791; 2026-01-07T17:51:51.021413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=87; 2026-01-07T17:51:51.021516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2026-01-07T17:51:51.021636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=83; 2026-01-07T17:51:51.021759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=71; 2026-01-07T17:51:51.027718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5889; 2026-01-07T17:51:51.032086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4251; 2026-01-07T17:51:51.032176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-07T17:51:51.032233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:51:51.032266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:51:51.032328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2026-01-07T17:51:51.032361Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-07T17:51:51.032430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2026-01-07T17:51:51.032515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-07T17:51:51.032584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-07T17:51:51.032664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2026-01-07T17:51:51.032975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=264; 2026-01-07T17:51:51.033014Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28797; 2026-01-07T17:51:51.033141Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive 
{blob_bytes=24365192;raw_bytes=35131129;count=5;records=400000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:51.033240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:51.033298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:51.033366Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:51.051505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:51:51.051660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:51.051762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:51:51.051832Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808009644;tx_id=18446744073709551615;;current_snapshot_ts=1767808273236; 2026-01-07T17:51:51.051894Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:51.051945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:51.051982Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:51.052067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:51.052292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.143000s; 2026-01-07T17:51:51.054000Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:51.054322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:51.054389Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:51.054472Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:51:51.054531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808009644;tx_id=18446744073709551615;;current_snapshot_ts=1767808273236; 2026-01-07T17:51:51.054572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:51.054617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:51.054658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:51.054763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:51.055139Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.187000s; 2026-01-07T17:51:51.055177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807664.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807664.000000s;Name=;Codec=}; 2026-01-07T17:51:06.403156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:06.435242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:06.435504Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:06.443388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:06.443642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:06.443906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:06.444044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:06.444178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:06.444312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:06.444423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:06.444542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:06.444693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:06.444814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.444958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:06.445077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:06.445198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:06.475743Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:06.476438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:06.476506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:06.476651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.476787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:06.476859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:06.476896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:06.478373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:06.478729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:06.479669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:06.479954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:06.480563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.480761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:06.480830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:51:06.480884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:06.481170Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:06.481377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:06.481463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:06.481518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:06.481603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:06.481668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:06.481726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:06.481793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:06.481860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:06.481910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:06.482416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:06.482491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:06.482546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:06.482825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:06.482902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.482954Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.483060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:06.483150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:06.483202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:06.483284Z node 1 :TX_COLUM ... son=unexpected on destructor; 2026-01-07T17:51:51.681508Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:51.682372Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-07T17:51:51.682898Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808302552:max} readable: {1767808302552:max} at tablet 9437184 2026-01-07T17:51:51.683040Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:51:51.683238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302552:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:51.683304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302552:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:51:51.683900Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302552:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:51:51.685174Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302552:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:51:51.685859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808302552:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-07T17:51:51.686222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:51:51.686378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:51:51.686573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:51.686719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:51.686959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:51.687097Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:51.687192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:51.687366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-07T17:51:51.687732Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":47122021,"name":"_full_task","f":47122021,"d_finished":0,"c":0,"l":47123623,"d":1602},"events":[{"name":"bootstrap","f":47122207,"d_finished":768,"c":1,"l":47122975,"d":768},{"a":47123139,"name":"ack","f":47123139,"d_finished":0,"c":0,"l":47123623,"d":484},{"a":47123123,"name":"processing","f":47123123,"d_finished":0,"c":0,"l":47123623,"d":500},{"name":"ProduceResults","f":47122694,"d_finished":482,"c":2,"l":47123418,"d":482},{"a":47123422,"name":"Finish","f":47123422,"d_finished":0,"c":0,"l":47123623,"d":201}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:51.687822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:51.688129Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47122021,"name":"_full_task","f":47122021,"d_finished":0,"c":0,"l":47124075,"d":2054},"events":[{"name":"bootstrap","f":47122207,"d_finished":768,"c":1,"l":47122975,"d":768},{"a":47123139,"name":"ack","f":47123139,"d_finished":0,"c":0,"l":47124075,"d":936},{"a":47123123,"name":"processing","f":47123123,"d_finished":0,"c":0,"l":47124075,"d":952},{"name":"ProduceResults","f":47122694,"d_finished":482,"c":2,"l":47123418,"d":482},{"a":47123422,"name":"Finish","f":47123422,"d_finished":0,"c":0,"l":47124075,"d":653}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-07T17:51:51.688217Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:51.685146Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:51:51.688249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:51.688351Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboYql::Select >> KqpRboPg::Bench_Select [GOOD] >> KqpRboPg::Bench_Filter >> KqpRboPg::ConstantFolding [GOOD] >> KqpRboPg::ExpressionSubquery >> KqpRboPg::Aggregation+ColumnStore >> KqpRboPg::TestCrossInnerJoin-ColumnStore [GOOD] >> KqpRboPg::UnionAll >> KqpRboYql::Select [GOOD] >> KqpRboYql::TPCH_YDB_PERF >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage >> 
TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-07T17:51:18.788065Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:51:18.791358Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:51:18.791691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:18.810566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:18.810785Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:18.816817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:18.817005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:18.817217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:18.817335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:18.817434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:18.817532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:18.817632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:18.817717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:18.817780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:18.817839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:18.817926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:18.817997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:18.818088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:18.834663Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:51:18.837695Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:18.838263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:18.838325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:18.838489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:18.838617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:18.838677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:18.838706Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:18.838773Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:18.838816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:18.838844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:18.838861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:18.838978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks 
found; 2026-01-07T17:51:18.839022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:18.839044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:18.839062Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:18.839127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:18.839168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:18.839215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:18.839234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:18.839274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:18.839303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:18.839323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:18.839353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:18.839375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:18.839433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:18.839567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:18.839598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:18.839617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:18.839697Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:18.839727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:18.839763Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:18.839794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:18.839819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:18.839848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:18.839876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:18.839906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... s;result=1;count=1000;finished=1; 2026-01-07T17:51:56.265508Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-07T17:51:56.265536Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-07T17:51:56.265721Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:56.265886Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.265922Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:51:56.266106Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-07T17:51:56.266176Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-07T17:51:56.266411Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2595];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-07T17:51:56.266564Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.266689Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.266821Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.266991Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:51:56.267147Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.267285Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.267528Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2596] finished for tablet 9437184 2026-01-07T17:51:56.267910Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2595];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.006}],"full":{"a":37841642,"name":"_full_task","f":37841642,"d_finished":0,"c":0,"l":37848350,"d":6708},"events":[{"name":"bootstrap","f":37841847,"d_finished":1031,"c":1,"l":37842878,"d":1031},{"a":37847743,"name":"ack","f":37846480,"d_finished":1159,"c":1,"l":37847639,"d":1766},{"a":37847732,"name":"processing","f":37843001,"d_finished":2771,"c":3,"l":37847642,"d":3389},{"name":"ProduceResults","f":37842487,"d_finished":2084,"c":6,"l":37848091,"d":2084},{"a":37848094,"name":"Finish","f":37848094,"d_finished":0,"c":0,"l":37848350,"d":256},{"name":"task_result","f":37843016,"d_finished":1565,"c":2,"l":37846328,"d":1565}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.267976Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:51:56.268385Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.004},{"events":["l_ProduceResults","f_Finish"],"t":0.006},{"events":["l_ack","l_processing","l_Finish"],"t":0.007}],"full":{"a":37841642,"name":"_full_task","f":37841642,"d_finished":0,"c":0,"l":37848785,"d":7143},"events":[{"name":"bootstrap","f":37841847,"d_finished":1031,"c":1,"l":37842878,"d":1031},{"a":37847743,"name":"ack","f":37846480,"d_finished":1159,"c":1,"l":37847639,"d":2201},{"a":37847732,"name":"processing","f":37843001,"d_finished":2771,"c":3,"l":37847642,"d":3824},{"name":"ProduceResults","f":37842487,"d_finished":2084,"c":6,"l":37848091,"d":2084},{"a":37848094,"name":"Finish","f":37848094,"d_finished":0,"c":0,"l":37848785,"d":691},{"name":"task_result","f":37843016,"d_finished":1565,"c":2,"l":37846328,"d":1565}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:51:56.268465Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:51:56.259452Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-07T17:51:56.268500Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:51:56.268679Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} 
ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpRboYql::OlapPredicatePushdown [GOOD] >> KqpRboYql::OrderBy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808867.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808867.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807667.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807667.000000s;Name=;Codec=}; 2026-01-07T17:51:09.372207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:09.390257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:09.390442Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:09.396366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:09.396531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:09.396699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:09.396775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:09.396852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:09.396920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:09.396995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:09.397059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:09.397160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:09.397278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.397372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:09.397465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:09.397538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:09.416605Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:09.417179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:09.417229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:09.417382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.417526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:09.417590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:09.417633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:09.417703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:09.417750Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:09.417784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:09.417807Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:09.417941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.417987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:09.418040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:09.418063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:09.418136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:09.418181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:09.418233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:09.418254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:09.418290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:09.418316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:09.418337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:09.418369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:09.418393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:09.418412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:09.418580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:09.418620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:09.418643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:09.418752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:09.418783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.418804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.418842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:09.418869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:09.418890Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:09.418920Z node 1 :TX_COLUM ... 
shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=6; 2026-01-07T17:51:56.659303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=66; 2026-01-07T17:51:56.659329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1851; 2026-01-07T17:51:56.659361Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1952; 2026-01-07T17:51:56.659404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-07T17:51:56.659446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2026-01-07T17:51:56.659471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2329; 2026-01-07T17:51:56.659551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=42; 2026-01-07T17:51:56.659628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=39; 2026-01-07T17:51:56.659726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2026-01-07T17:51:56.659790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2026-01-07T17:51:56.660896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1074; 2026-01-07T17:51:56.661970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=993; 2026-01-07T17:51:56.662013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2026-01-07T17:51:56.662041Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-01-07T17:51:56.662064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-07T17:51:56.662123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-01-07T17:51:56.662153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:51:56.662209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=38; 2026-01-07T17:51:56.662244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-07T17:51:56.662285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2026-01-07T17:51:56.662336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=30; 2026-01-07T17:51:56.662523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=141; 2026-01-07T17:51:56.662548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11151; 2026-01-07T17:51:56.662636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:56.662709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:56.662741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:56.662784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:56.668179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:51:56.668269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:56.668349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:56.668400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808006913;tx_id=18446744073709551615;;current_snapshot_ts=1767808294800; 2026-01-07T17:51:56.668428Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:56.668458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.668484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.668541Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:56.668683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.118000s; 2026-01-07T17:51:56.670015Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:56.670097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:56.670125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:56.670208Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:56.670261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808006913;tx_id=18446744073709551615;;current_snapshot_ts=1767808294800; 2026-01-07T17:51:56.670294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:56.670325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.670355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.670426Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:56.670672Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.185000s; 2026-01-07T17:51:56.670697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboPg::ExpressionSubquery [GOOD] >> KqpRboPg::FallbackToYqlEnabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808867.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808867.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807667.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807667.000000s;Name=;Codec=}; 2026-01-07T17:51:09.259108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:09.280432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:09.280671Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:09.287418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:09.287646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:09.287821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:09.287903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:09.287988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:09.288069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:09.288132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:09.288200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:09.288285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:09.288362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.288423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:09.288482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:09.288546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:09.307487Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:09.308002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:09.308064Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:09.308200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.308304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:09.308360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:09.308398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:09.308471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:09.308525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:09.308564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:09.308596Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:09.308748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.308798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:09.308840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:09.308868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:09.308971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:09.309019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:09.309069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:09.309093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:09.309129Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:09.309155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:09.309173Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:09.309211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:09.309246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:09.309265Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:09.309416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:09.309462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:09.309489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:09.309576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:09.309621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.309651Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.309688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:09.309726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:09.309747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:09.309783Z node 1 :TX_COLUM ... 
rd;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-07T17:51:56.634058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=59; 2026-01-07T17:51:56.634083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2082; 2026-01-07T17:51:56.634112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2153; 2026-01-07T17:51:56.634145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=6; 2026-01-07T17:51:56.634197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24; 2026-01-07T17:51:56.634235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2539; 2026-01-07T17:51:56.634318Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=47; 2026-01-07T17:51:56.634395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=50; 2026-01-07T17:51:56.634484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=50; 2026-01-07T17:51:56.634566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=47; 2026-01-07T17:51:56.635638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1043; 2026-01-07T17:51:56.636701Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1023; 2026-01-07T17:51:56.636743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2026-01-07T17:51:56.636771Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2026-01-07T17:51:56.636793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-07T17:51:56.636832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=23; 2026-01-07T17:51:56.636855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:51:56.636908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=30; 2026-01-07T17:51:56.636961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=22; 2026-01-07T17:51:56.637026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-07T17:51:56.637079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=31; 2026-01-07T17:51:56.637238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=130; 2026-01-07T17:51:56.637264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11462; 2026-01-07T17:51:56.637348Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:51:56.637426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:51:56.637475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:51:56.637530Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:51:56.643207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:51:56.643308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:56.643377Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:56.643432Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808006799;tx_id=18446744073709551615;;current_snapshot_ts=1767808294686; 2026-01-07T17:51:56.643469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:56.643506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.643538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.643604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:56.643738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.076000s; 2026-01-07T17:51:56.645091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:51:56.645176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:51:56.645206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:51:56.645272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:51:56.645319Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808006799;tx_id=18446744073709551615;;current_snapshot_ts=1767808294686; 2026-01-07T17:51:56.645347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:51:56.645377Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.645404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:51:56.645463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:51:56.645825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.055000s; 2026-01-07T17:51:56.645856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboPg::UnionAll [GOOD] >> KqpRboYql::Aggregation+ColumnStore >> KqpRboPg::Bench_Filter [GOOD] >> KqpRboPg::Bench_CrossFilter >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::Validation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] Test command err: Trying to start YDB, gRPC: 21509, MsgBus: 3918 2026-01-07T17:49:26.383358Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678262235582425:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:26.387946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:26.841037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:26.895443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:26.895597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:26.938697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:26.979501Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:26.984768Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678262235582322:2081] 1767808166375399 != 1767808166375402 2026-01-07T17:49:27.089170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:27.170273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:27.170292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:27.170302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:27.170360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:27.383599Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:27.889803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:27.896445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:29.679024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275120485109:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.679025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275120485100:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.679123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.679347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678275120485115:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.679408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:29.682831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:29.694576Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678275120485114:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:29.811425Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678275120485167:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:30.112264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:30.236734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.303727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:31.381193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678262235582425:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:31.381264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:49:33.223431Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Read conflict with concurrent transaction.;tx_id=3; 2026-01-07T17:49:33.233974Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-07T17:49:33.234142Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-07T17:49:33.234309Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7592678292300362682:2951], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [1:7592678288005395119:2951]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7592678292300362682:2951].{
: Error: Read conflict with concurrent transaction., code: 2001 } 2026-01-07T17:49:33.234828Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7592678292300362676:2951], SessionActorId: [1:7592678288005395119:2951], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Read conflict with concurrent transaction., code: 2001 . sessionActorId=[1:7592678288005395119:2951]. 2026-01-07T17:49:33.235100Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MmI0NmRmNzMtYWY5OWU4N2EtNmUwYzUwNDEtNDNmMTMyZDk=, ActorId: [1:7592678288005395119:2951], ActorState: ExecuteState, LegacyTraceId: 01kecs8135b2ywqx2p4zyqkcss, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7592678292300362677:2951] from: [1:7592678292300362676:2951] trace_id# 2026-01-07T17:49:33.235224Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592678292300362677:2951] TxId: 281474976710666. Ctx: { TraceId: 01kecs8135b2ywqx2p4zyqkcss, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmI0NmRmNzMtYWY5OWU4N2EtNmUwYzUwNDEtNDNmMTMyZDk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Read conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-07T17:49:33.235481Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MmI0NmRmNzMtYWY5OWU4N2EtNmUwYzUwNDEtNDNmMTMyZDk=, ActorId: [1:7592678288005395119:2951], ActorState: ExecuteState, LegacyTraceId: 01kecs8135b2ywqx2p4zyqkcss, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 23344, MsgBus: 30030 2026-01-07T17:49:34.533984Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678293124673256:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:34.534021Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:34.570468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:34.646723Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:34.684525Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:34.684605Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:34.689893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:34.809912Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:34.861483Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, wi ... 
17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.883603Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890508Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890562Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890573Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890606Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890664Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.890692Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.897784Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.897838Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.897851Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.899920Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.899983Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.900001Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-07T17:51:54.904766Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.904811Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.904824Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.909075Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.909135Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.909151Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038042;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.911676Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.911720Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.911732Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918149Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918197Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918208Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918740Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918782Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.918794Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925258Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925315Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925330Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925596Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925637Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.925650Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932281Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932286Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932319Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932329Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932332Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.932343Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.939538Z node 17 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.939608Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.939621Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.940364Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.940405Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:51:54.940416Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] >> KqpRboPg::Bench_CrossFilter [GOOD] >> KqpRboPg::Bench_JoinFilter |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808861.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808861.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808861.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808861.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808861.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808861.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807661.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808861.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808861.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807661.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807661.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807661.000000s;Name=;Codec=}; 2026-01-07T17:51:02.093760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:02.116947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:02.117180Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:02.123984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:02.124221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:02.124429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:02.124567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:02.124686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:02.124782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:02.124851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:02.124970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:02.125088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:02.125199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.125283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:02.125385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:02.125492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:02.148901Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:02.149598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:02.149678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:02.149858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.150023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:02.150110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:02.150168Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:02.150252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:02.150342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:02.150407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:02.150451Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:02.150651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:02.150732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:02.150787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:02.150824Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:02.150930Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:02.151037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:02.151097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:02.151131Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:02.151203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:02.151255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:02.151288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:02.151345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:02.151393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:02.151427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:02.151622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:02.151684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:02.151718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:02.151838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:02.151881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.151913Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:02.151966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:02.152017Z node 1 :TX_COLUMNSHARD WARN: l ... hing=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:00.910394Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 9 at tablet 9437184 2026-01-07T17:52:00.910592Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808310252:max} readable: {1767808310252:max} at tablet 9437184 2026-01-07T17:52:00.910694Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:00.910821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808310252:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:00.910880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808310252:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:00.911202Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808310252:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:00.912374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808310252:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:00.913118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808310252:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:963:2908];trace_detailed=; 2026-01-07T17:52:00.913468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:00.913641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:00.913826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:00.913993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:00.914211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:00.914335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:00.914440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:00.914606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:963:2908] finished for tablet 9437184 2026-01-07T17:52:00.915044Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:962:2907];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":60934993,"name":"_full_task","f":60934993,"d_finished":0,"c":0,"l":60936593,"d":1600},"events":[{"name":"bootstrap","f":60935205,"d_finished":753,"c":1,"l":60935958,"d":753},{"a":60936132,"name":"ack","f":60936132,"d_finished":0,"c":0,"l":60936593,"d":461},{"a":60936117,"name":"processing","f":60936117,"d_finished":0,"c":0,"l":60936593,"d":476},{"name":"ProduceResults","f":60935675,"d_finished":498,"c":2,"l":60936397,"d":498},{"a":60936405,"name":"Finish","f":60936405,"d_finished":0,"c":0,"l":60936593,"d":188}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:00.915127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:962:2907];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:00.915440Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:962:2907];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":60934993,"name":"_full_task","f":60934993,"d_finished":0,"c":0,"l":60937110,"d":2117},"events":[{"name":"bootstrap","f":60935205,"d_finished":753,"c":1,"l":60935958,"d":753},{"a":60936132,"name":"ack","f":60936132,"d_finished":0,"c":0,"l":60937110,"d":978},{"a":60936117,"name":"processing","f":60936117,"d_finished":0,"c":0,"l":60937110,"d":993},{"name":"ProduceResults","f":60935675,"d_finished":498,"c":2,"l":60936397,"d":498},{"a":60936405,"name":"Finish","f":60936405,"d_finished":0,"c":0,"l":60937110,"d":705}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:963:2908]->[1:962:2907] 2026-01-07T17:52:00.915518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:00.912347Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:00.915563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:00.915647Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] >> 
KqpRboPg::FallbackToYqlEnabled [GOOD] >> KqpRboPg::FallbackToYqlDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808874.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167808874.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808874.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147808874.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807674.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147807674.000000s;Name=;Codec=}; 2026-01-07T17:51:16.054129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:16.074180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:16.074368Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:16.079901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:16.080074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:16.080232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:16.080320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:16.080394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:16.080465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:16.080539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:16.080617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:16.080706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:16.080796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.080886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:16.080984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:16.081066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:16.099833Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:16.100298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:16.100346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:16.100468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:16.100590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:16.100653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:16.100681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:16.100736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:16.100787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:16.100822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:16.100845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:16.100976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:16.101024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:16.101049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:16.101067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:16.101122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:16.101167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:16.101201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:16.101227Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:16.101277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:16.101302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:16.101320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:16.101346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:16.101370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:16.101405Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:16.101533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:16.101567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:16.101585Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:16.101654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:16.101678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.101696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:16.101727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:16.101749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:16.101768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:16.101794Z node 1 :TX_COLUM ... ;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:01.567406Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:01.568051Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-07T17:52:01.568240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808312212:max} readable: {1767808312212:max} at tablet 9437184 2026-01-07T17:52:01.568331Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:01.568478Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312212:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:01.568533Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312212:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:01.568886Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312212:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:01.570044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312212:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:01.570700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312212:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-07T17:52:01.571031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:01.571167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:01.571348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.571479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.571640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:01.571738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.571846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.572038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-07T17:52:01.572400Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":46915424,"name":"_full_task","f":46915424,"d_finished":0,"c":0,"l":46916877,"d":1453},"events":[{"name":"bootstrap","f":46915605,"d_finished":680,"c":1,"l":46916285,"d":680},{"a":46916408,"name":"ack","f":46916408,"d_finished":0,"c":0,"l":46916877,"d":469},{"a":46916396,"name":"processing","f":46916396,"d_finished":0,"c":0,"l":46916877,"d":481},{"name":"ProduceResults","f":46916042,"d_finished":447,"c":2,"l":46916644,"d":447},{"a":46916648,"name":"Finish","f":46916648,"d_finished":0,"c":0,"l":46916877,"d":229}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.572462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:01.572738Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":46915424,"name":"_full_task","f":46915424,"d_finished":0,"c":0,"l":46917276,"d":1852},"events":[{"name":"bootstrap","f":46915605,"d_finished":680,"c":1,"l":46916285,"d":680},{"a":46916408,"name":"ack","f":46916408,"d_finished":0,"c":0,"l":46917276,"d":868},{"a":46916396,"name":"processing","f":46916396,"d_finished":0,"c":0,"l":46917276,"d":880},{"name":"ProduceResults","f":46916042,"d_finished":447,"c":2,"l":46916644,"d":447},{"a":46916648,"name":"Finish","f":46916648,"d_finished":0,"c":0,"l":46917276,"d":628}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-07T17:52:01.572831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:01.570025Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:01.572881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:01.573000Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807663.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807663.000000s;Name=;Codec=}; 2026-01-07T17:51:04.183521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:04.206444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:04.206669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:04.214180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:04.214447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:04.214743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:04.214917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:04.215042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:04.215204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:04.215329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:04.215450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:04.215581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:04.215721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.215849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:04.215953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:04.216075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:04.246286Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:04.246890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:04.246961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:04.247170Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.247359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:04.247471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:04.247526Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:04.247630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:04.247720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:04.247770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:04.247814Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:04.247998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.248076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:04.248126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:04.248170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:04.248292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:04.248366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:04.248404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:04.248429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:04.248471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:04.248501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:04.248522Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:04.248552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:04.248580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:04.248602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:04.248748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:04.248805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:04.248844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:04.248995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:04.249069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.249106Z nod ... te::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:01.743756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:01.743893Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808312512:max} readable: {1767808312512:max} at tablet 9437184 2026-01-07T17:52:01.743983Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:01.744155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312512:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:01.744201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312512:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:01.744583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312512:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:01.745766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312512:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:01.746437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808312512:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2026-01-07T17:52:01.746787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:01.746939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:01.747115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.747233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.747514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:01.747624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.747717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.747881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2026-01-07T17:52:01.748223Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":61307372,"name":"_full_task","f":61307372,"d_finished":0,"c":0,"l":61308924,"d":1552},"events":[{"name":"bootstrap","f":61307556,"d_finished":701,"c":1,"l":61308257,"d":701},{"a":61308496,"name":"ack","f":61308496,"d_finished":0,"c":0,"l":61308924,"d":428},{"a":61308478,"name":"processing","f":61308478,"d_finished":0,"c":0,"l":61308924,"d":446},{"name":"ProduceResults","f":61308033,"d_finished":421,"c":2,"l":61308737,"d":421},{"a":61308741,"name":"Finish","f":61308741,"d_finished":0,"c":0,"l":61308924,"d":183}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:01.748297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:01.748626Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":61307372,"name":"_full_task","f":61307372,"d_finished":0,"c":0,"l":61309343,"d":1971},"events":[{"name":"bootstrap","f":61307556,"d_finished":701,"c":1,"l":61308257,"d":701},{"a":61308496,"name":"ack","f":61308496,"d_finished":0,"c":0,"l":61309343,"d":847},{"a":61308478,"name":"processing","f":61308478,"d_finished":0,"c":0,"l":61309343,"d":865},{"name":"ProduceResults","f":61308033,"d_finished":421,"c":2,"l":61308737,"d":421},{"a":61308741,"name":"Finish","f":61308741,"d_finished":0,"c":0,"l":61309343,"d":602}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2026-01-07T17:52:01.748703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:01.745744Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:01.748734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:01.748830Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 
160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboYql::OrderBy [GOOD] >> KqpRboYql::LeftJoins >> KqpRboPg::Bench_JoinFilter [GOOD] >> TColumnShardTestSchema::HotTiers [GOOD] >> KqpRboPg::FallbackToYqlDisabled [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::SimpleDataHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808866.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808866.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807666.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807666.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807666.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807666.000000s;Name=;Codec=}; 2026-01-07T17:51:06.738099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:06.766151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:06.766381Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2026-01-07T17:51:06.772773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:06.773046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:06.773269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:06.773374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:06.773482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:06.773591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:06.773678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:06.773785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:06.773890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:06.773984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.774072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:06.774168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:06.774289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:06.794216Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:06.794884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:06.794955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:06.795138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.795297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:06.795376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:06.795438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:06.795544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:06.795618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:06.795668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:06.795703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:06.795858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.795927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:06.795972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:06.796000Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:06.796082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:06.796154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:06.796204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:06.796239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:06.796289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:06.796321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:06.796349Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:06.796392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:06.796427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:06.796454Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:06.796620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:06.796666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:06.796694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:06.796780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:06.796810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.796 ... 
e::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:05.852862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:05.852996Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808315064:max} readable: {1767808315064:max} at tablet 9437184 2026-01-07T17:52:05.853085Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:05.853214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808315064:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:05.853272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808315064:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:05.853625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808315064:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:05.854646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808315064:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:05.855222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808315064:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2026-01-07T17:52:05.855522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:05.855672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:05.855819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:05.855936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:05.856192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:05.856288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:05.856361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:05.856535Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2026-01-07T17:52:05.856819Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":62293796,"name":"_full_task","f":62293796,"d_finished":0,"c":0,"l":62295188,"d":1392},"events":[{"name":"bootstrap","f":62293974,"d_finished":620,"c":1,"l":62294594,"d":620},{"a":62294793,"name":"ack","f":62294793,"d_finished":0,"c":0,"l":62295188,"d":395},{"a":62294780,"name":"processing","f":62294780,"d_finished":0,"c":0,"l":62295188,"d":408},{"name":"ProduceResults","f":62294362,"d_finished":389,"c":2,"l":62294988,"d":389},{"a":62295001,"name":"Finish","f":62295001,"d_finished":0,"c":0,"l":62295188,"d":187}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:05.856870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:05.857245Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":62293796,"name":"_full_task","f":62293796,"d_finished":0,"c":0,"l":62295523,"d":1727},"events":[{"name":"bootstrap","f":62293974,"d_finished":620,"c":1,"l":62294594,"d":620},{"a":62294793,"name":"ack","f":62294793,"d_finished":0,"c":0,"l":62295523,"d":730},{"a":62294780,"name":"processing","f":62294780,"d_finished":0,"c":0,"l":62295523,"d":743},{"name":"ProduceResults","f":62294362,"d_finished":389,"c":2,"l":62294988,"d":389},{"a":62295001,"name":"Finish","f":62295001,"d_finished":0,"c":0,"l":62295523,"d":522}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2026-01-07T17:52:05.857312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:05.854623Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:05.857341Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:05.857428Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_JoinFilter [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 11320, MsgBus: 16801 2026-01-07T17:51:51.268986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678884689860564:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:51.269086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:51.492478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:51.600196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.600364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.600612Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678884689860527:2081] 1767808311265878 != 1767808311265881 2026-01-07T17:51:51.604392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:51.611661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:51:51.773583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:51.822087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:51.822112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:51.822125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:51.822203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:52.276431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:52.334978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:53.532668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678893279796006:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.532668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678893279796011:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.532743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.532946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678893279796021:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.533008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.539638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:53.547281Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678893279796020:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:51:53.658881Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678893279796073:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64219, MsgBus: 64695 2026-01-07T17:51:54.811612Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678895969742614:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:54.811671Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:54.819638Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:54.866474Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:54.867368Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678895969742586:2081] 1767808314810917 != 1767808314810920 2026-01-07T17:51:54.920322Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:54.920411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:54.921859Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:54.926733Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:54.926753Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:54.926758Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:54.926821Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:55.116284Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:55.185428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:55.815850Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:56.710812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678904559678053:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.710873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.711022Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678904559678062:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.711050Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.871464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:56.970894Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678904559678159:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.970988Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.971016Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678904559678164:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.971142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678904559678166:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.971192Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.973747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:56.980980Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPo ... OR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:00.322896Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678923214519155:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.323002Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.323263Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678923214519165:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.323329Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.342732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:00.370082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:00.411177Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678923214519331:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.411247Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.411260Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678923214519336:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.411403Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678923214519338:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.411463Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:00.414546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:00.422277Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678923214519339:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:52:00.523654Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678923214519391:2637] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11509, MsgBus: 24337 2026-01-07T17:52:01.767255Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678925579634470:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:01.767340Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:01.777465Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:01.841104Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:01.875447Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:01.875534Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:01.877043Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:01.910949Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:01.910977Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:01.910986Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:01.911087Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:02.013403Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:02.274154Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:02.771385Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:04.314150Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678938464537206:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.314224Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.314442Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678938464537216:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.314492Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.331864Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:04.365652Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:04.470424Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678938464537380:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.470503Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.470507Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678938464537385:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.470669Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678938464537388:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.470712Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.473484Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:04.480627Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678938464537387:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:52:04.539373Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678938464537440:2637] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FallbackToYqlDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 23858, MsgBus: 24498 2026-01-07T17:51:51.269325Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678883158029234:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:51.269393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:51.503903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:51.542082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.542212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.558828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.609764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:51.610729Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678883158029197:2081] 1767808311265865 != 1767808311265868 2026-01-07T17:51:51.718041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:51.822019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:51.822056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:51.822074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:51.822148Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:52.277641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:52.332514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:53.483324Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678891747964673:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678891747964682:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.878071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:54.023808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678896042932081:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.023866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.023874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678896042932086:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.024012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678896042932088:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.024059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.027026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:54.035339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678896042932089:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:51:54.116190Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678896042932141:2597] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9114, MsgBus: 15284 2026-01-07T17:51:54.811755Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678895372518942:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:54.811793Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:54.820590Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:54.872444Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:54.873909Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678895372518899:2081] 1767808314810803 != 1767808314810806 2026-01-07T17:51:54.918964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:54.919038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:54.935936Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:54.935962Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:54.935967Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:54.936036Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:54.937958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:55.053202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:55.232407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:55.815905Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:56.975090Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678903962454369:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.975151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.975296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678903962454378:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.975322Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.999134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:57.022570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:57.075625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_act ... urce pool default not found or you don't have access permissions } 2026-01-07T17:51:59.937874Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:59.938050Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678918793959087:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:59.938093Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678918793959086:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:59.938159Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:59.940406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:59.947284Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678918793959090:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:52:00.035068Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678923088926437:2587] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:00.097845Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7592678923088926453:2345], database: /Root, text: "\n --!syntax_pg\n SET TablePathPrefix = \"/Root/\";\n select sum(distinct t1.c), sum(distinct t1.a) from t1 group by t1.b order by t1.b;\n " 2026-01-07T17:52:01.912292Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7592678927383893800:2367], database: /Root, text: "\n INSERT INTO `/Root/t1` (a, b, c) VALUES (1, 2, 3);\n " Trying to start YDB, gRPC: 1660, MsgBus: 4018 2026-01-07T17:52:02.713329Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678929553993674:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:02.713376Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:02.728064Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:02.785697Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:02.785715Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678929553993647:2081] 1767808322712988 != 1767808322712991 2026-01-07T17:52:02.822617Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:02.822692Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:02.833400Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:02.853142Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:02.853177Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:02.853185Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:02.853268Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:03.026679Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:03.215277Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:03.719152Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:05.183287Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678942438896416:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.183405Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.183644Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678942438896426:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.183709Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.201146Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:05.317971Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678942438896522:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.318022Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678942438896527:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.318066Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.318183Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592678942438896530:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.318216Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:05.320479Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:05.327269Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592678942438896529:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:52:05.405324Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592678942438896582:2591] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:05.454814Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592678942438896598:2345], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:4:1: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported, code: 1 2026-01-07T17:52:05.455224Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZDQxNjQ3MWYtN2RhMzRhN2MtYzQxMmQ2ZmItNzM0MTYwNjI=, ActorId: [4:7592678942438896519:2334], ActorState: ExecuteState, LegacyTraceId: 01kecscnp55q7tb5gb2wet724a, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 4 column: 1 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported" end_position { row: 4 column: 1 } issue_code: 1 } } tx_id# trace_id# 2026-01-07T17:52:05.500798Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592678942438896616:2355], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed, code: 1 2026-01-07T17:52:05.501094Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YTVlYmJhNWItN2Q0ZGE3MjctZjY1ZjJhM2EtNWQ2ZGIzZWQ=, ActorId: [4:7592678942438896608:2350], ActorState: ExecuteState, LegacyTraceId: 01kecscnv2enr3c8ac62ep8eb6, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed" issue_code: 1 } } tx_id# trace_id# |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-07T17:52:06.679076Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.679136Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.679172Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.681773Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:52:06.681819Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.681855Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.682745Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009518s 2026-01-07T17:52:06.683171Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.686142Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:52:06.686223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.688154Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.688170Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.688188Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.688444Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-07T17:52:06.688473Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.688496Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.688572Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006223s 2026-01-07T17:52:06.688881Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-07T17:52:06.689190Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:52:06.689259Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.689756Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.689769Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.689778Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.690052Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:52:06.690080Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.690093Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.690134Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.183918s 2026-01-07T17:52:06.690457Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.690733Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:52:06.690788Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.691205Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.691215Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.691222Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.691471Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-07T17:52:06.691501Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.691513Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.691549Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.177920s 2026-01-07T17:52:06.691928Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.692319Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-07T17:52:06.692380Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.692972Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.692990Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.693002Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.693228Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.693616Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:52:06.705168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.705388Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-07T17:52:06.705424Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.705437Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.705492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.289234s 2026-01-07T17:52:06.705793Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:52:06.706648Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.706671Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.706686Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.706974Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.707291Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:52:06.707421Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.708613Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:52:06.713336Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:52:06.713759Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-07T17:52:06.713957Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-07T17:52:06.714026Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-07T17:52:06.714115Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:52:06.714142Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-07T17:52:06.714179Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:52:06.714206Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-07T17:52:06.715083Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.715110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.715127Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.715411Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.715873Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:52:06.715993Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.716328Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:52:06.817015Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.817310Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:52:06.817415Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:52:06.817481Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:52:06.817565Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:52:06.917873Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-07T17:52:06.918265Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-07T17:52:06.919148Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.919164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.919179Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-07T17:52:06.919456Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-07T17:52:06.919815Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-07T17:52:06.919963Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:06.920308Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-07T17:52:07.021216Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T17:52:07.021396Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-07T17:52:07.021445Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-07T17:52:07.021472Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-07T17:52:07.021551Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-07T17:52:07.021646Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-07T17:52:07.021704Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-07T17:52:07.021766Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-07T17:52:07.021871Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |95.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TColumnShardTestSchema::RebootColdTiers [GOOD] >> KqpEffects::EmptyUpdate+UseSink >> KqpEffects::InsertAbort_Literal_Success >> KqpEffects::DeletePkPrefixWithIndex >> KqpImmediateEffects::UpsertDuplicates >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> KqpEffects::UpdateOn_Literal >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808864.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808864.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808864.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808864.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807664.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808864.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807664.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807664.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807664.000000s;Name=;Codec=}; 2026-01-07T17:51:05.293825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:05.321465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:05.321694Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:05.328061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:05.328259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:05.328478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:05.328583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:05.328676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:05.328777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:05.328860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:05.328974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:05.329076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:05.329189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.329271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:05.329353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:05.329444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:05.355417Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:05.355975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:05.356038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:05.356202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.356335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:05.356404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:05.356443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:05.356527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:05.356613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:05.356684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:05.356714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:05.356868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:05.356918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:05.356974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:05.357000Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:05.357084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:05.357187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:05.357237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:05.357267Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:05.357326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:05.357362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-07T17:51:05.357392Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:05.357447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:05.357488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:05.357518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:05.357704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:05.357770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:05.357807Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:05.357922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:05.357980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.358008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:05.358075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:05.358118Z node 1 :TX_COLUMNSHARD WARN: l ... 
ime=7; 2026-01-07T17:52:07.782592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=58; 2026-01-07T17:52:07.782615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2304; 2026-01-07T17:52:07.782647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2383; 2026-01-07T17:52:07.782696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=6; 2026-01-07T17:52:07.782755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2026-01-07T17:52:07.782781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2833; 2026-01-07T17:52:07.782864Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=49; 2026-01-07T17:52:07.782950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=48; 2026-01-07T17:52:07.783049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=62; 2026-01-07T17:52:07.783121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=45; 2026-01-07T17:52:07.784340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1185; 2026-01-07T17:52:07.785520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1128; 2026-01-07T17:52:07.785584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-07T17:52:07.785626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-07T17:52:07.785653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-07T17:52:07.785699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2026-01-07T17:52:07.785725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:52:07.785791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=35; 2026-01-07T17:52:07.785820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-07T17:52:07.785865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2026-01-07T17:52:07.785921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=32; 2026-01-07T17:52:07.786138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=189; 2026-01-07T17:52:07.786162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12666; 2026-01-07T17:52:07.786260Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:52:07.786375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:07.786426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:07.786469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:07.798675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:07.798793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:07.798865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 
2026-01-07T17:52:07.798912Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808014936;tx_id=18446744073709551615;;current_snapshot_ts=1767808302823; 2026-01-07T17:52:07.798948Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:07.798979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:07.799036Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:07.799130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:07.799287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.170000s; 2026-01-07T17:52:07.800285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:07.801237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:07.801291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:07.801370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:52:07.801418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808014936;tx_id=18446744073709551615;;current_snapshot_ts=1767808302823; 2026-01-07T17:52:07.801449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:07.801487Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:07.801524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:07.801591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:07.801815Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.044000s; 2026-01-07T17:52:07.801840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpEffects::UpdateOn_Select >> KqpImmediateEffects::Delete >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> KqpImmediateEffects::UpdateAfterUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808863.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807663.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807663.000000s;Name=;Codec=}; 2026-01-07T17:51:04.287070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:04.308449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:04.308669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:04.314385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:04.314542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:04.314721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:04.314788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:04.314886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:04.314949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:04.315002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:04.315066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:04.315137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:04.315209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.315282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:04.315352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:04.315412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:04.340470Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:04.341112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:04.341177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:04.341348Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.341492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:04.341561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:04.341608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:04.341676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:04.341718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:04.341746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:04.341774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:04.341888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:04.341927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:04.341952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:04.341982Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:04.342055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:04.342120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:04.342183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:04.342215Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:04.342264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:04.342298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:04.342325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:04.342363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:04.342396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:04.342426Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:04.342603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:04.342676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:04.342721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:04.342827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:04.342869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:04.342896Z node 1 :TX_ ... init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=105; 2026-01-07T17:52:09.974808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8437; 2026-01-07T17:52:09.974854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8549; 2026-01-07T17:52:09.974916Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-07T17:52:09.974997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=40; 2026-01-07T17:52:09.975034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9158; 2026-01-07T17:52:09.975180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2026-01-07T17:52:09.975298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2026-01-07T17:52:09.975426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=86; 2026-01-07T17:52:09.975536Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=69; 2026-01-07T17:52:09.980850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5241; 2026-01-07T17:52:09.985325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4331; 2026-01-07T17:52:09.985442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 
2026-01-07T17:52:09.985500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-07T17:52:09.985538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-07T17:52:09.985607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-01-07T17:52:09.985647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:52:09.985733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-07T17:52:09.985774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:52:09.985839Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-07T17:52:09.985928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-07T17:52:09.986249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=285; 2026-01-07T17:52:09.986289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29305; 2026-01-07T17:52:09.986434Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:52:09.986548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:09.986602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:09.986664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:10.005359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:10.005526Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:10.005620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:52:10.005691Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808014114;tx_id=18446744073709551615;;current_snapshot_ts=1767808265593; 2026-01-07T17:52:10.005740Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:10.005792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.005831Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.005922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:10.006146Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.122000s; 2026-01-07T17:52:10.006258Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=4;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2026-01-07T17:52:10.010176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:10.010531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:10.010598Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:10.010712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:52:10.010782Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808014114;tx_id=18446744073709551615;;current_snapshot_ts=1767808265593; 2026-01-07T17:52:10.010835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:10.010888Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.010935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.011035Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:10.011295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.142000s; 2026-01-07T17:52:10.011326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807671.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807671.000000s;Name=;Codec=}; 2026-01-07T17:51:11.417026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:11.444327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:11.444546Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:11.451033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:11.451257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:11.451472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:11.451584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:11.451694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:11.451807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:11.451901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:11.452008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:11.452123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:11.452217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.452315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:11.452408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:11.452509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:11.479175Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:11.479837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:11.479906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:11.480077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.480223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:11.480297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:11.480345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:11.480436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:11.480488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:11.480523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:11.480556Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:11.480694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:11.480746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:11.480782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:51:11.480820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:11.480910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:11.480982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:11.481021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:11.481049Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:11.481098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:11.481131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:11.481160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:11.481197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:11.481231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:11.481276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:11.481444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:11.481493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:11.481520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:11.481624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:11.481658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:11.481683Z 
node 1 :TX_ ... hard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.763764Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:10.763976Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808319743:max} readable: {1767808319743:max} at tablet 9437184 2026-01-07T17:52:10.764119Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:10.764330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808319743:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:10.764423Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808319743:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:10.765057Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808319743:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:10.766866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808319743:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:10.767895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808319743:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2026-01-07T17:52:10.768434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:10.768673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:10.768955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:10.769148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:10.769576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:10.769732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:10.769863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:10.770111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2026-01-07T17:52:10.770660Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":62082744,"name":"_full_task","f":62082744,"d_finished":0,"c":0,"l":62085110,"d":2366},"events":[{"name":"bootstrap","f":62083037,"d_finished":1074,"c":1,"l":62084111,"d":1074},{"a":62084476,"name":"ack","f":62084476,"d_finished":0,"c":0,"l":62085110,"d":634},{"a":62084452,"name":"processing","f":62084452,"d_finished":0,"c":0,"l":62085110,"d":658},{"name":"ProduceResults","f":62083736,"d_finished":660,"c":2,"l":62084827,"d":660},{"a":62084834,"name":"Finish","f":62084834,"d_finished":0,"c":0,"l":62085110,"d":276}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:10.770752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:10.771222Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":62082744,"name":"_full_task","f":62082744,"d_finished":0,"c":0,"l":62085732,"d":2988},"events":[{"name":"bootstrap","f":62083037,"d_finished":1074,"c":1,"l":62084111,"d":1074},{"a":62084476,"name":"ack","f":62084476,"d_finished":0,"c":0,"l":62085732,"d":1256},{"a":62084452,"name":"processing","f":62084452,"d_finished":0,"c":0,"l":62085732,"d":1280},{"name":"ProduceResults","f":62083736,"d_finished":660,"c":2,"l":62084827,"d":660},{"a":62084834,"name":"Finish","f":62084834,"d_finished":0,"c":0,"l":62085732,"d":898}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2026-01-07T17:52:10.771328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:10.766834Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:10.771380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:10.771518Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808866.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808866.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808866.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807666.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808866.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807666.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807666.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807666.000000s;Name=;Codec=}; 2026-01-07T17:51:06.744694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:06.776271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:06.776520Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:06.784106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:06.784360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:06.784623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:06.784753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:06.784860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:06.785010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:06.785113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:06.785229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:06.785353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:06.785446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.785540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:06.785638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:06.785738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:06.814440Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:06.815217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:06.815313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:06.815509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.815692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:06.815781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:06.815851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:06.815972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:06.816065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:06.816120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:06.816161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:06.816341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:06.816406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:06.816452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-07T17:51:06.816484Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:06.816578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:06.816655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:06.816715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:06.816749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:06.816814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:06.816859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:06.816890Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:06.816960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:06.817006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:06.817035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:06.817241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:06.817300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:06.817337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:06.817464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:06.817507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:06.817 ... 
ite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=72; 2026-01-07T17:52:10.906840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5113; 2026-01-07T17:52:10.906869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5203; 2026-01-07T17:52:10.906908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-07T17:52:10.906960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24; 2026-01-07T17:52:10.906982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5680; 2026-01-07T17:52:10.907065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=51; 2026-01-07T17:52:10.907143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=49; 2026-01-07T17:52:10.907226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=53; 2026-01-07T17:52:10.907299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2026-01-07T17:52:10.909614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2278; 2026-01-07T17:52:10.911947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2273; 2026-01-07T17:52:10.911999Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-07T17:52:10.912031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-01-07T17:52:10.912061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 
2026-01-07T17:52:10.912105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=23; 2026-01-07T17:52:10.912129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-07T17:52:10.912178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=29; 2026-01-07T17:52:10.912204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:52:10.912243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2026-01-07T17:52:10.912293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=28; 2026-01-07T17:52:10.912517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=199; 2026-01-07T17:52:10.912547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17206; 2026-01-07T17:52:10.912646Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:52:10.912747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:10.912790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:10.912839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:10.924882Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:10.925043Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:10.925118Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:52:10.925168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808016569;tx_id=18446744073709551615;;current_snapshot_ts=1767808268048; 2026-01-07T17:52:10.925198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:10.925240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.925269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.925348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:10.925526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.050000s; 2026-01-07T17:52:10.925606Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=4;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2026-01-07T17:52:10.926546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:10.927200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:10.927252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:10.927335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-07T17:52:10.927392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808016569;tx_id=18446744073709551615;;current_snapshot_ts=1767808268048; 2026-01-07T17:52:10.927429Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:10.927460Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.927485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:10.927551Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:10.927916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.143000s; 2026-01-07T17:52:10.927947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> KqpEffects::InsertAbort_Params_Success >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> KqpEffects::EmptyUpdate+UseSink [GOOD] >> KqpEffects::EmptyUpdate-UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpImmediateEffects::MultiShardUpsertAfterRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 9426, MsgBus: 25373 2026-01-07T17:49:33.564791Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678292723284824:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:49:33.570120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:49:33.763393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:49:33.801710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:33.801901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:33.854737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:33.855516Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678292723284658:2081] 1767808173535649 != 1767808173535652 2026-01-07T17:49:33.862302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:33.989590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:33.989610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:33.989616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:33.989682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:34.051300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:49:34.533530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:34.551554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:49:34.576040Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:36.694246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305608187423:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.694353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.694636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305608187451:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.694698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.695033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678305608187449:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:36.698828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:36.710865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678305608187453:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:49:36.880646Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678305608187504:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:37.153508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:37.341970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:37.342250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:37.342468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:37.342572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:37.342643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:37.342714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:37.342789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:37.342868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:37.342966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:37.343058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:37.343124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:37.343211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:37.343296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7592678309903154936:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:37.353643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:37.353712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:37.353898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:37.353990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:37.354068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:37.354145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:37.354240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:37.354321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7592678309903154946:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:37.354396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
ver.cpp:249:23 #13 0x00003ff53625 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003ff579f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e62b368 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b4f9b79 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #17 0x00001b4f9b79 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #18 0x00001b6e66ff in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:634:16 #19 0x00001b696ee7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #20 0x00001b696ee7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #21 0x00001b696ee7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #22 0x00001b696ee7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #23 0x00001b696ee7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #24 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #25 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #26 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #27 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x00001b69623c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #29 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #31 0x7f472a50bd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce7779 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce7779 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce7779 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce7779 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce7779 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021ce7779 in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021ce7779 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1557:13 #8 0x000021ce7779 in 
grpc_core::Server::RegisterCompletionQueue(grpc_completion_queue*) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:794:8 #9 0x0000212fbfc7 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:396:5 #10 0x0000212eba79 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #11 0x00003ff53625 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #12 0x00003ff579f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #13 0x00004e62b368 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #14 0x00001b4f9b79 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #15 0x00001b4f9b79 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #16 0x00001b6e66ff in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:634:16 #17 0x00001b696ee7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #18 0x00001b696ee7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b696ee7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b696ee7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b696ee7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b69623c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #27 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7f472a50bd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021368121 in grpc_core::internal::StatusAllocHeapPtr(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/gprpp/status_helper.cc:427:25 #2 0x00002149d202 in grpc_core::CallCombiner::Cancel(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/call_combiner.cc:233:25 #3 0x0000214439fe in grpc_core::FilterStackCall::CancelWithError(y_absl::lts_y_20250127::Status) 
/-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1037:18 #4 0x00002143f11c in grpc_core::Call::CancelWithStatus(grpc_status_code, char const*) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:366:3 #5 0x000021461223 in grpc_call_cancel_with_status /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3499:30 #6 0x000021d40d46 in grpc::ServerContextBase::TryCancel() const /-S/contrib/libs/grpc/src/cpp/server/server_context.cc:347:7 #7 0x0000212e86dc in NYdbGrpc::TGrpcServiceProtectiable::StopService() /-S/ydb/library/grpc/server/grpc_server.cpp:64:26 #8 0x0000212eeeee in NYdbGrpc::TGRpcServer::Stop() /-S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b39dfe6 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b39dfe6 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b39dfe6 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b6e673d in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:70:5 #13 0x00001b6e673d in reset /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:286:7 #14 0x00001b6e673d in ~unique_ptr /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:255:71 #15 0x00001b6e673d in ~TTableDataModificationTester /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:15:7 #16 0x00001b6e673d in ~TPragmaSetting /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:529:11 #17 0x00001b6e673d in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:635:5 #18 0x00001b696ee7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #19 0x00001b696ee7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #20 0x00001b696ee7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #21 0x00001b696ee7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #22 0x00001b696ee7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #23 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #24 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #25 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #26 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #27 0x00001b69623c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #28 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #29 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #30 0x7f472a50bd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 428565 byte(s) leaked in 5232 allocation(s). 
|95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::UpdateOn_Params >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> KqpRboPg::Aggregation+ColumnStore [GOOD] >> KqpRboPg::Aggregation-ColumnStore >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> KqpEffects::UpdateOn_Select [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> KqpFail::Immediate >> THiveTest::TestLockTabletExecutionLocalGone >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RR2 >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpImmediateEffects::UpdateAfterInsert >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167808870.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808870.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808870.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808870.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808870.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808870.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808870.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807670.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808870.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808870.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807670.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807670.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807670.000000s;Name=;Codec=}; 2026-01-07T17:51:10.469437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:10.497768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:10.497989Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:10.503811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:10.504013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:10.504203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:10.504282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:10.504356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:10.504439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:10.504504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:10.504576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:10.504651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:10.504721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:10.504788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:10.504873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:10.504969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:10.526540Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:10.527152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:10.527252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:10.527420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:10.527561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:10.527640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:10.527675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:10.527978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:10.528036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:10.528067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:10.528093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:10.528234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:10.528277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:10.528305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:10.528332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:10.528405Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:10.528471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:10.528513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:10.528535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:10.528575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:10.528603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:10.528633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:10.528678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:10.528889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:10.528914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:10.529111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:10.529169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:10.529203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:10.529289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:10.529322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:5 ... 
ite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=74; 2026-01-07T17:52:15.411059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=4885; 2026-01-07T17:52:15.411088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=4962; 2026-01-07T17:52:15.411131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-07T17:52:15.411183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24; 2026-01-07T17:52:15.411207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5331; 2026-01-07T17:52:15.411300Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=56; 2026-01-07T17:52:15.411387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=54; 2026-01-07T17:52:15.411466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=53; 2026-01-07T17:52:15.411529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2026-01-07T17:52:15.414369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2799; 2026-01-07T17:52:15.416814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2383; 2026-01-07T17:52:15.416873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2026-01-07T17:52:15.416906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-07T17:52:15.416948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=19; 
2026-01-07T17:52:15.416993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=23; 2026-01-07T17:52:15.417018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-07T17:52:15.417087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-07T17:52:15.417122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:52:15.417175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-07T17:52:15.417237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=36; 2026-01-07T17:52:15.417450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=184; 2026-01-07T17:52:15.417474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17168; 2026-01-07T17:52:15.417560Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:52:15.417638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:15.417675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:15.417719Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:15.429333Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:15.429444Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:15.429504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-07T17:52:15.429552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808020298;tx_id=18446744073709551615;;current_snapshot_ts=1767808271777; 2026-01-07T17:52:15.429581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:15.429611Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:15.429635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:15.429693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:15.429841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.075000s; 2026-01-07T17:52:15.429936Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=4;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2026-01-07T17:52:15.430747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:15.431090Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:15.431143Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:15.431213Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-07T17:52:15.431274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808020298;tx_id=18446744073709551615;;current_snapshot_ts=1767808271777; 2026-01-07T17:52:15.431321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:15.431368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:15.431405Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:15.431484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:15.432310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.113000s; 2026-01-07T17:52:15.432347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboYql::Aggregation+ColumnStore [GOOD] >> KqpRboYql::Aggregation-ColumnStore >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> KqpEffects::EmptyUpdate-UseSink [GOOD] >> KqpEffects::EffectWithSelect-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147808871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807671.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147807671.000000s;Name=;Codec=}; 2026-01-07T17:51:12.121436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:12.140991Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:12.141289Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:12.146225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:12.146376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:12.146531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:12.146601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:12.146668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:12.146733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:12.146788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:12.146850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:12.146916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:12.146973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:12.147024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:12.147076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:12.147152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:12.164421Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:12.164874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:12.164920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:12.165084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:12.165187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:12.165238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:12.165264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:12.165329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:12.165376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:12.165403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:12.165425Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:12.165526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:12.165561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:12.165585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:12.165614Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:12.165686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:12.165735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:12.165781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:12.165804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:12.165838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:12.165862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:12.165879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:12.165905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:12.165928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:12.165946Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:12.166057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:12.166131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:12.166177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:12.166272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:12.166298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:12.166314Z nod ... e=8; 2026-01-07T17:52:16.033149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=84; 2026-01-07T17:52:16.033180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5624; 2026-01-07T17:52:16.033213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5715; 2026-01-07T17:52:16.033259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-07T17:52:16.033322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=28; 2026-01-07T17:52:16.033347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6217; 2026-01-07T17:52:16.033466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=78; 2026-01-07T17:52:16.033582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2026-01-07T17:52:16.033700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=81; 2026-01-07T17:52:16.033789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=52; 2026-01-07T17:52:16.036425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2590; 2026-01-07T17:52:16.039153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2656; 2026-01-07T17:52:16.039227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-07T17:52:16.039277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-07T17:52:16.039316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:52:16.039386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-01-07T17:52:16.039427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-07T17:52:16.039504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2026-01-07T17:52:16.039546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-07T17:52:16.039608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2026-01-07T17:52:16.039688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2026-01-07T17:52:16.039945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=224; 2026-01-07T17:52:16.039974Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20400; 2026-01-07T17:52:16.040096Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-07T17:52:16.040202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:16.040245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:16.040308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:16.053235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:16.053370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:16.053445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-07T17:52:16.053501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808021963;tx_id=18446744073709551615;;current_snapshot_ts=1767808273442; 2026-01-07T17:52:16.053535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:16.053573Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.053603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.053666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:16.053857Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.022000s; 2026-01-07T17:52:16.054926Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:16.055624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:16.055677Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:16.055754Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-07T17:52:16.055807Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808021963;tx_id=18446744073709551615;;current_snapshot_ts=1767808273442; 2026-01-07T17:52:16.055845Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:16.055882Z node 
1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.055913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.055984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:16.056214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.155000s; 2026-01-07T17:52:16.056243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808867.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807667.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-07T17:51:09.028280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:09.065495Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:09.065689Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 
9437184 2026-01-07T17:51:09.072221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:09.072448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:09.072679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:09.072800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:09.072897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:09.073030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:09.073151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:09.073248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:09.073376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:09.073486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.073582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:09.073674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:09.073776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:09.099480Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:09.100085Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:09.100152Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:09.100335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.100478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:09.100619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:09.100664Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:09.100783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:09.100876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:09.100948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:09.100984Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:09.101173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:09.101238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:09.101278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:09.101309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:09.101417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:09.101481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:09.101530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-07T17:51:09.101561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:09.101613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:09.101662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:09.101697Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:09.101745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:09.101782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:09.101811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:09.102016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:09.102064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:09.102095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:09.102245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:09.102302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.102339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:09.102391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:09.102450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:09.102485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-07T17:51:09.102531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-07T17:52:16.570210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=126; 2026-01-07T17:52:16.570252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9059; 2026-01-07T17:52:16.570293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9187; 2026-01-07T17:52:16.570357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-07T17:52:16.570454Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=47; 2026-01-07T17:52:16.570492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9794; 2026-01-07T17:52:16.570638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=99; 2026-01-07T17:52:16.570741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59; 2026-01-07T17:52:16.570849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2026-01-07T17:52:16.570965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73; 2026-01-07T17:52:16.574688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3658; 2026-01-07T17:52:16.578752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3979; 2026-01-07T17:52:16.578838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 
2026-01-07T17:52:16.578888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-07T17:52:16.578925Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-07T17:52:16.578989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2026-01-07T17:52:16.579035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-07T17:52:16.579119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2026-01-07T17:52:16.579155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-07T17:52:16.579229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-07T17:52:16.579322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-07T17:52:16.579547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=192; 2026-01-07T17:52:16.579591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27592; 2026-01-07T17:52:16.579709Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=21099992;raw_bytes=29608900;count=3;records=320000} evicted {blob_bytes=10565848;raw_bytes=16084450;count=1;records=160000} at tablet 9437184 2026-01-07T17:52:16.579801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-07T17:52:16.579861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-07T17:52:16.579927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-07T17:52:16.590050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-07T17:52:16.590212Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:16.590308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:52:16.590375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808025055;tx_id=18446744073709551615;;current_snapshot_ts=1767808270189; 2026-01-07T17:52:16.590443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:16.590490Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.590527Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.590610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:16.590812Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.091000s; 2026-01-07T17:52:16.592026Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-07T17:52:16.592776Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-07T17:52:16.592827Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-07T17:52:16.592913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-07T17:52:16.592987Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767808025055;tx_id=18446744073709551615;;current_snapshot_ts=1767808270189; 2026-01-07T17:52:16.593032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-07T17:52:16.593088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.593125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.593213Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:16.593579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.073000s; 2026-01-07T17:52:16.593631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1693:3530];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808871.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808871.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807671.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-07T17:51:12.834080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:12.868702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:12.868958Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:12.879441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:12.879687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:12.879938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:12.880067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:12.880179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:12.880308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:12.880385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:12.880444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:12.880538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:12.880616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:12.880683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:12.880744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:12.880807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:12.905526Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:12.906157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:12.906242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:12.906420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:12.906577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:12.906660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:12.906707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:12.906800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:12.906880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:12.906934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:12.906967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:12.907133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:12.907197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:12.907240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:12.907270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:12.907367Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:12.907419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:12.907462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:12.907493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:12.907551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:12.907607Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:12.907649Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:12.907698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:12.907739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:12.907768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:12.907953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:12.908017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:12.908052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:12.908169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:12.908213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:12.908242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:12.908305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:12.908350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:12.908382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:12.908424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
k;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:16.688392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:2254;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.688426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-07T17:52:16.688585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=2254; 2026-01-07T17:52:16.688654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=2254;batch_columns=timestamp; 2026-01-07T17:52:16.688915Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1234:3173];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1235:3174]->[1:1234:3173] 2026-01-07T17:52:16.689105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.689224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.689359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.689515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:16.689633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.689743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.690002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1235:3174] finished for tablet 9437184 2026-01-07T17:52:16.690486Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1234:3173];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.022},{"events":["l_task_result"],"t":0.148},{"events":["l_ProduceResults","f_Finish"],"t":0.149},{"events":["l_ack","l_processing","l_Finish"],"t":0.15}],"full":{"a":65126223,"name":"_full_task","f":65126223,"d_finished":0,"c":0,"l":65276259,"d":150036},"events":[{"name":"bootstrap","f":65126505,"d_finished":916,"c":1,"l":65127421,"d":916},{"a":65275694,"name":"ack","f":65148704,"d_finished":55455,"c":71,"l":65275584,"d":56020},{"a":65275680,"name":"processing","f":65127553,"d_finished":117228,"c":143,"l":65275590,"d":117807},{"name":"ProduceResults","f":65127084,"d_finished":95971,"c":216,"l":65275982,"d":95971},{"a":65275988,"name":"Finish","f":65275988,"d_finished":0,"c":0,"l":65276259,"d":271},{"name":"task_result","f":65127570,"d_finished":59749,"c":72,"l":65274286,"d":59749}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.690560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1234:3173];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:16.690938Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1234:3173];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.022},{"events":["l_task_result"],"t":0.148},{"events":["l_ProduceResults","f_Finish"],"t":0.149},{"events":["l_ack","l_processing","l_Finish"],"t":0.15}],"full":{"a":65126223,"name":"_full_task","f":65126223,"d_finished":0,"c":0,"l":65276801,"d":150578},"events":[{"name":"bootstrap","f":65126505,"d_finished":916,"c":1,"l":65127421,"d":916},{"a":65275694,"name":"ack","f":65148704,"d_finished":55455,"c":71,"l":65275584,"d":56562},{"a":65275680,"name":"processing","f":65127553,"d_finished":117228,"c":143,"l":65275590,"d":118349},{"name":"ProduceResults","f":65127084,"d_finished":95971,"c":216,"l":65275982,"d":95971},{"a":65275988,"name":"Finish","f":65275988,"d_finished":0,"c":0,"l":65276801,"d":813},{"name":"task_result","f":65127570,"d_finished":59749,"c":72,"l":65274286,"d":59749}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1235:3174]->[1:1234:3173] 2026-01-07T17:52:16.691035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:16.538612Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10565848;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10565848;selected_rows=0; 2026-01-07T17:52:16.691074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:16.691256Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 >> KqpRboYql::TPCH_YDB_PERF [GOOD] >> KqpRboYql::TPCH_YQL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167808878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167808878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147808878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167808878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167808878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807678.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147808878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147808878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147807678.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147807678.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147807678.000000s;Name=;Codec=}; 2026-01-07T17:51:18.892679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:18.912197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:18.912356Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:18.917585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:18.917792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:18.918014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:18.918141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:18.918267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:18.918381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:18.918479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:18.918590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:18.918708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:18.918806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:18.918898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:18.918994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:18.919060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:18.936018Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:18.936515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:18.936562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:18.936684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:18.936785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:18.936840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:18.936868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:18.936973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:18.937025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:18.937054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:18.937076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:18.937180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:18.937222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:18.937247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:18.937264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:18.937318Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:18.937360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:18.937392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:18.937411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:18.937448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:18.937472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:18.937488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:18.937515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:18.937542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:18.937559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:18.937670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:18.937698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:18.937726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:18.937795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:18.937823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:5 ... lumnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-07T17:52:16.648394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-07T17:52:16.648621Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767808327227:max} readable: {1767808327227:max} at tablet 9437184 2026-01-07T17:52:16.648756Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-07T17:52:16.648950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808327227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:16.649021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808327227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-07T17:52:16.649552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808327227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-07T17:52:16.651327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808327227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-07T17:52:16.652253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767808327227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2026-01-07T17:52:16.652667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-07T17:52:16.652879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-07T17:52:16.653132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.653304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.653637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-07T17:52:16.653777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.653910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.654124Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2026-01-07T17:52:16.654641Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":60019248,"name":"_full_task","f":60019248,"d_finished":0,"c":0,"l":60021250,"d":2002},"events":[{"name":"bootstrap","f":60019471,"d_finished":930,"c":1,"l":60020401,"d":930},{"a":60020676,"name":"ack","f":60020676,"d_finished":0,"c":0,"l":60021250,"d":574},{"a":60020658,"name":"processing","f":60020658,"d_finished":0,"c":0,"l":60021250,"d":592},{"name":"ProduceResults","f":60020084,"d_finished":582,"c":2,"l":60020997,"d":582},{"a":60021002,"name":"Finish","f":60021002,"d_finished":0,"c":0,"l":60021250,"d":248}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-07T17:52:16.654725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-07T17:52:16.655170Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":60019248,"name":"_full_task","f":60019248,"d_finished":0,"c":0,"l":60021835,"d":2587},"events":[{"name":"bootstrap","f":60019471,"d_finished":930,"c":1,"l":60020401,"d":930},{"a":60020676,"name":"ack","f":60020676,"d_finished":0,"c":0,"l":60021835,"d":1159},{"a":60020658,"name":"processing","f":60020658,"d_finished":0,"c":0,"l":60021835,"d":1177},{"name":"ProduceResults","f":60020084,"d_finished":582,"c":2,"l":60020997,"d":582},{"a":60021002,"name":"Finish","f":60021002,"d_finished":0,"c":0,"l":60021835,"d":833}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2026-01-07T17:52:16.655271Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-07T17:52:16.651299Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-07T17:52:16.655318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-07T17:52:16.655444Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' 
stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> KqpEffects::InsertAbort_Select_Success >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> KqpImmediateEffects::UpsertExistingKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2026-01-07T17:50:37.232696Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.261209Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.261481Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.262410Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.262835Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.263969Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:37.264034Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.266376Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-07T17:50:37.266415Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.266526Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.266679Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.268822Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-07T17:50:37.268866Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-07T17:50:37.268958Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.269005Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.269268Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 
2026-01-07T17:50:37.269418Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.280015Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.280072Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.282310Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.282548Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.282710Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.282855Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.282995Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.283139Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.283273Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.283309Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.283475Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:37.283581Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.298786Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.298849Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.298974Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.301230Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.301303Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:50:37.301375Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:50:37.301401Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:50:37.301441Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 
ClusterStateGuid: 0} 2026-01-07T17:50:37.301674Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-07T17:50:37.301706Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-07T17:50:37.301864Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:37.301930Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-07T17:50:37.301961Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-07T17:50:37.302038Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-07T17:50:37.307255Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-07T17:50:37.307620Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:37.307796Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.307976Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2026-01-07T17:50:37.308010Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:51:2091] 2026-01-07T17:50:37.308114Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.308197Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:51:2091] 2026-01-07T17:50:37.308538Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-07T17:50:37.308591Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:37.308674Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:37.308846Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-07T17:50:37.309033Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-07T17:50:37.309088Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-07T17:50:37.309833Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.310079Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf 
ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.310269Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T17:50:37.313304Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:37.313399Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-07T17:50:37.313496Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.321830Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-07T17:50:37.321921Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-07T17:50:37.321971Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-07T17:50:37.322107Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-07T17:50:37.326763Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-07T17:50:37.326845Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 202 ... 
72057594046678944 leader: [44:334:2203] followers: 0 2026-01-07T17:52:17.989190Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [44:334:2203] 2026-01-07T17:52:17.989313Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594046678944] forward result remote node 44 [45:559:2160] 2026-01-07T17:52:17.989495Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594046678944] remote node connected [45:559:2160] 2026-01-07T17:52:17.989573Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:559:2160] 2026-01-07T17:52:17.989909Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594046678944] Accept Connect Originator# [45:559:2160] 2026-01-07T17:52:17.990288Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594046678944] connected with status OK role: Leader [45:559:2160] 2026-01-07T17:52:17.990371Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594046678944] send queued [45:559:2160] 2026-01-07T17:52:17.990485Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046678944] send [45:559:2160] 2026-01-07T17:52:17.990521Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046678944] push event to server [45:559:2160] 2026-01-07T17:52:17.990589Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:559:2160] 2026-01-07T17:52:17.990743Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594046678944] Push Sender# [45:558:2160] EventType# 271122945 2026-01-07T17:52:17.990914Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2026-01-07T17:52:17.991016Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:17.991311Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:17.991411Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:17.993007Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [45:565:2161] 2026-01-07T17:52:17.993053Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [45:565:2161] 2026-01-07T17:52:17.993102Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [45:566:2162] 2026-01-07T17:52:17.993128Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [45:566:2162] 2026-01-07T17:52:17.993313Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [45:565:2161] 2026-01-07T17:52:17.993377Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037888] queue send [45:566:2162] 2026-01-07T17:52:17.993634Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 
entry.State: StNormal leader: [44:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:52:17.993699Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [44:333:2202] 2026-01-07T17:52:17.994006Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:52:17.994108Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 44 [45:565:2161] 2026-01-07T17:52:17.994370Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:52:17.994815Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [45:565:2161] 2026-01-07T17:52:17.994865Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:565:2161] 2026-01-07T17:52:17.995310Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:52:17.995451Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:52:17.995500Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:52:17.995771Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:476:2307] CurrentLeaderTablet: [44:492:2318] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:52:17.996065Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:476:2307] CurrentLeaderTablet: [44:492:2318] CurrentGeneration: 1 CurrentStep: 0} 2026-01-07T17:52:17.996148Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [44:476:2307] followers: 0 2026-01-07T17:52:17.996210Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [44:476:2307] 2026-01-07T17:52:17.996289Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 44 [45:566:2162] 2026-01-07T17:52:17.996390Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [45:565:2161] 2026-01-07T17:52:17.996695Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [45:566:2162] 2026-01-07T17:52:17.996746Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:566:2162] 2026-01-07T17:52:17.997246Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected 
with status OK role: Leader [45:565:2161] 2026-01-07T17:52:17.997291Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [45:565:2161] 2026-01-07T17:52:17.997336Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [45:565:2161] 2026-01-07T17:52:17.997455Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:565:2161] 2026-01-07T17:52:17.997633Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [45:566:2162] 2026-01-07T17:52:17.998330Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [45:562:2161] EventType# 268959744 2026-01-07T17:52:17.998563Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-07T17:52:17.998663Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:17.998913Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:17.999041Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 204b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:52:17.999151Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:17.999410Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [45:566:2162] 2026-01-07T17:52:17.999454Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [45:566:2162] 2026-01-07T17:52:17.999488Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [45:566:2162] 2026-01-07T17:52:17.999553Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:566:2162] 2026-01-07T17:52:17.999722Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:52:17.999823Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:17.999976Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:18.000058Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:18.000276Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [45:563:2162] EventType# 268959744 2026-01-07T17:52:18.000424Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-07T17:52:18.000471Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:18.000618Z node 44 :HIVE WARN: node_info.cpp:25: 
HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:18.000724Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:18.000795Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:52:18.000873Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:18.001079Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:52:18.001124Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:18.001197Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:18.001270Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpEffects::AlterDuringUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> KqpImmediateEffects::Insert >> KqpImmediateEffects::Upsert >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::BigRow >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 14788, MsgBus: 29306 2026-01-07T17:52:09.059712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678959908414602:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059752Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.299602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.339354Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.339500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.380603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.394163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.572394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662533Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.069727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.303136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.361464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.468281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.600248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.652597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.733411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678968498351032:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.733499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.733791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678968498351041:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.733874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.058156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.082777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.107024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.130143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.151671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.179934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.209766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.252223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.328389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972793319212:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.328473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.328739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972793319217:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.328766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.328807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972793319218:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.332887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.344587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678972793319221:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.422003Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678972793319272:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.531261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 30481, MsgBus: 4062 2026-01-07T17:52:14.515434Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678983533703634:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.515475Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.524604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.590168Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:14.623266Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:14.623350Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:14.625019Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:14.656956Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:14.656976Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:14.656982Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:14.657074Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:14.683060Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:15.016046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:15.033243Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.090439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.238031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.294834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.520021Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:17.234120Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996418607341:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.234203Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.234406Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996418607351:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.234466Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.299344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.324136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.347647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.371591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.396190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.420429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.468819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.506179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.569209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996418608219:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.569260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996418608224:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.569266Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.569392Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996418608226:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.569421Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.571754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:17.579337Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592678996418608227:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:17.637104Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592678996418608279:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:18.817753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.515647Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678983533703634:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:19.515754Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpRboPg::Aggregation-ColumnStore [GOOD] >> KqpRboPg::AliasesRenames ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4036, MsgBus: 2057 2026-01-07T17:52:09.059848Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961915852628:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.305311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.343024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.343143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.386819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.393944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.555031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:52:09.662286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.069622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.284815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.296874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:10.353643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.482138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.614107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.681570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.897819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970505789051:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.897912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.898183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970505789061:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.898256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.142372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.167501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.191308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.215820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.239247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.268553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.299306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.340548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.395688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974800757225:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.395745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.395816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974800757230:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.395951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974800757232:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.395979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.398611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.407043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678974800757234:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.490919Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678974800757285:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.639295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.060168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678961915852628:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.060217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1968, MsgBus: 23975 2026-01-07T17:52:14.740548Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678983268352240:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.740606Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.752197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.833169Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:14.862829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:14.862908Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:14.865088Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:14.929546Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:14.929569Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:14.929577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:14.929654Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:14.946326Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:15.297778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:15.306764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.355508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.469079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.550408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.743191Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:17.824158Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996153255958:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.824239Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.824522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678996153255967:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.824582Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.888430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.915660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.942071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.967629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.993986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.020445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.048540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.110714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.175760Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679000448224132:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.175831Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.175846Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679000448224137:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.175969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679000448224139:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.176009Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.178719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:18.186226Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679000448224140:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:18.261048Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679000448224192:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:19.660081Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.740852Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678983268352240:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:19.740913Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpImmediateEffects::UpdateAfterInsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22907, MsgBus: 64495 2026-01-07T17:52:09.059822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961500949113:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.301064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.341689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.341793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.396555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.409807Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.594072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-07T17:52:09.662275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.070016Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.302947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.367959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.494964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.626287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.689401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.760296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970090885542:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.760421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.760648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970090885552:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.760690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.058388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.087196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.112596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.138488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.166356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.192636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.220458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.257885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.331890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974385853722:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.331969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.332147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974385853727:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.332208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974385853728:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.332250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.335453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.343760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678974385853731:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:12.433489Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678974385853782:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.527977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.059920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678961500949113:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.059989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6801, MsgBus: 18287 2026-01-07T17:52:15.027434Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678986653335725:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:15.028645Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:15.039843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:15.098026Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:15.142383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:15.142478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:15.144050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:15.197295Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:15.197323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:15.197330Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:15.197422Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:15.275655Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:15.528619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:15.537353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.581994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.683645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.747598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.034694Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:17.977019Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678995243272131:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.977105Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.977325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678995243272141:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.977365Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.048277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.072376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.094815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.119549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.145311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.172917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.224903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.259833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.322485Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678999538240307:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.322563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678999538240312:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.322569Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.322708Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678999538240314:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.322754Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.325237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:18.332823Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592678999538240315:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:18.412793Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592678999538240367:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:19.851347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.027309Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678986653335725:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:20.060643Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 25684, MsgBus: 7773 2026-01-07T17:52:11.355684Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678968571532753:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:11.356410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:11.562149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:11.565181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:11.565293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:11.567116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:11.632106Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:11.633415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678968571532725:2081] 1767808331354389 != 1767808331354392 2026-01-07T17:52:11.647082Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:52:11.647303Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:52:11.667874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:11.667891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:11.667896Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:11.667986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:11.841949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:11.989116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:12.046342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.152419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.276624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.335198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.428964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:13.656830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678977161469188:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:13.656956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:13.657234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678977161469198:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:13.657297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:13.883113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:13.905946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:13.930428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:13.954879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:13.978960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.005979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.034460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.071523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.139783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678981456437360:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:14.139870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:14.139936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678981456437365:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:14.139987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678981456437367:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:14.140015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:14.142977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:14.151821Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678981456437369:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:14.251484Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678981456437420:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:15.429713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 17307, MsgBus: 19045 2026-01-07T17:52:16.630467Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=u ... Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:16.690080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:16.691173Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678991447819042:2081] 1767808336629681 != 1767808336629684 2026-01-07T17:52:16.743056Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:16.743114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:16.752327Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:16.752346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:16.752352Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:16.752423Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:16.757876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:16.856839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:17.109842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:17.118670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.178675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.281906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.329390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.634490Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:19.719217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679004332722796:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.719320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.720021Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679004332722806:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.720093Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.720263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679004332722808:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.720316Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.767716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.792110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.816291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.842479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.868729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.895415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.924296Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.964989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.039754Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679008627690973:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.039841Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.039900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679008627690978:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.040265Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679008627690981:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.040322Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.042889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:20.054680Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679008627690980:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:20.118938Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679008627691033:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:21.490122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.630670Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592678991447819082:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:21.630736Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> BasicStatistics::TwoTables [GOOD] >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpRboYql::Aggregation-ColumnStore [GOOD] >> BasicStatistics::Simple [GOOD] >> KqpRboYql::LeftJoins [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:45.579214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:45.646945Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:45.651887Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:45.652158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:45.652258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:45.927773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:46.025170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:46.025307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:46.059588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:46.129114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:46.767756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:46.768597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:46.768632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:46.768660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:46.768868Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:46.831615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:47.318832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:49.345605Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:49.349918Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:49.352332Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:49.372359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:49.372442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.408610Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:49.409840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.568155Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.568241Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.569315Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.569805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.570243Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.570907Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.570994Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.571242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.571306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.571384Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.571520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.585256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.726030Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.745789Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:49.745865Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:49.784053Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:49.785592Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:49.785868Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:49.785947Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:49.786002Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:49.786074Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:49.786142Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:49.786194Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:49.788083Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:49.789283Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:49.791822Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:49.802167Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:49.802244Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:49.802370Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:49.809376Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.809484Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.883738Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2212:2650] 2026-01-07T17:51:49.884066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2212:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:49.884885Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2214:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:49.925384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:49.944057Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:49.944200Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:49.958043Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:50.041293Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:50.121757Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:50.435147Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:50.564606Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:50.564700Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:51.264261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... lyToActorId = [2:3808:3392], StatRequests.size() = 1 2026-01-07T17:52:09.759111Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3847:3410]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:09.759353Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:52:09.759394Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3847:3410], StatRequests.size() = 1 2026-01-07T17:52:10.625469Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3874:3420]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:10.625817Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:52:10.625872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3874:3420], StatRequests.size() = 1 2026-01-07T17:52:11.047604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:11.472236Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3905:3434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:11.472516Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:11.472562Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3905:3434], StatRequests.size() = 1 2026-01-07T17:52:12.249509Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3932:3444]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:12.249836Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:12.249883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3932:3444], StatRequests.size() = 1 2026-01-07T17:52:12.624322Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-07T17:52:12.624869Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:52:12.625169Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:12.625231Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:13.025655Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3959:3454]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.025890Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:13.025921Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3959:3454], StatRequests.size() = 1 2026-01-07T17:52:13.864028Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3990:3466]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.864365Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:13.864413Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3990:3466], StatRequests.size() = 1 2026-01-07T17:52:14.684623Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4023:3478]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.684922Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:14.684998Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4023:3478], StatRequests.size() = 1 2026-01-07T17:52:15.186071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:15.320230Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:15.320316Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.320355Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.320398Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:15.774160Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4057:3491]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.774392Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:15.774436Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4057:3491], StatRequests.size() = 1 2026-01-07T17:52:16.599536Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4088:3503]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:16.599812Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:16.599851Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), 
request id = 29, ReplyToActorId = [2:4088:3503], StatRequests.size() = 1 2026-01-07T17:52:17.125325Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:17.125486Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:52:17.125617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:17.125780Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:17.158972Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:17.159065Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:17.159255Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-07T17:52:17.173073Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:17.775469Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4119:3517]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.775665Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:17.775691Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4119:3517], StatRequests.size() = 1 2026-01-07T17:52:18.878842Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4146:3527]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:18.879065Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:18.879094Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4146:3527], StatRequests.size() = 1 2026-01-07T17:52:19.900386Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4175:3537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:19.900755Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:19.900806Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4175:3537], StatRequests.size() = 1 2026-01-07T17:52:20.438990Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:21.074329Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4210:3551]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.074775Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:21.074828Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4210:3551], StatRequests.size() = 1 2026-01-07T17:52:22.085729Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], 
ReplyToActorId[ [2:4237:3561]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.085998Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-07T17:52:22.086037Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4237:3561], StatRequests.size() = 1 2026-01-07T17:52:22.569689Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:22.569963Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-07T17:52:22.570157Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:22.570250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:22.624634Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:22.624703Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:22.624973Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-07T17:52:22.638846Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:23.156338Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4270:3577]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.156691Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-07T17:52:23.156738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4270:3577], StatRequests.size() = 1 2026-01-07T17:52:23.157217Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 36 ], ReplyToActorId[ [2:4272:3579]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.160980Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 36 ] 2026-01-07T17:52:23.161057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 36, ReplyToActorId = [2:4272:3579], StatRequests.size() = 1 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::ManyFlushes >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:51:46.526618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:46.594253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:46.599213Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:46.599451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:46.599543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:46.850971Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:46.927013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:46.927154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:46.961687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:47.027799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:47.661200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:47.662323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:47.662383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:47.662413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:47.662599Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:47.726080Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:48.218010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:50.203599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:50.210297Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:50.213184Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:50.235438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:50.235578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.272195Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:50.273366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.432423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:50.432496Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.433547Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.433973Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.434408Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435068Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435157Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435349Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435420Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435496Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.435612Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.449494Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.596247Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:50.616695Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:50.616775Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:50.643880Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:50.644722Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:50.644862Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:50.644908Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:50.644967Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:50.645006Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:50.645049Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:50.645090Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:50.645489Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:50.646934Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:50.648728Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:50.656772Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:50.656830Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:50.656904Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:50.663650Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.663743Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.709884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:51:50.710283Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:50.712202Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:50.736834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:50.747549Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:50.747631Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:50.756174Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:50.827822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:50.913685Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:51.204035Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:51.312199Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:51.312272Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:52.034569Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-07T17:52:08.668466Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3637:3305]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:08.668769Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:52:08.668814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3637:3305], StatRequests.size() = 1 2026-01-07T17:52:09.508847Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3672:3319]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:09.509107Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:52:09.509137Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3672:3319], StatRequests.size() = 1 2026-01-07T17:52:10.308644Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3701:3331]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:10.308961Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:52:10.309011Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3701:3331], StatRequests.size() = 1 2026-01-07T17:52:10.745843Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:11.227798Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3732:3345]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:11.228066Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:11.228114Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3732:3345], StatRequests.size() = 1 2026-01-07T17:52:12.184972Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3759:3355]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-07T17:52:12.185269Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:12.185316Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3759:3355], StatRequests.size() = 1 2026-01-07T17:52:12.679280Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:12.679726Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:52:12.680093Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:12.680164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:13.137759Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3786:3365]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.138087Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:13.138129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3786:3365], StatRequests.size() = 1 2026-01-07T17:52:14.140152Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3819:3378]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.140362Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:14.140389Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3819:3378], StatRequests.size() = 1 2026-01-07T17:52:15.134627Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:3850:3388]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.135075Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:15.135133Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:3850:3388], StatRequests.size() = 1 2026-01-07T17:52:15.773134Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:15.906584Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:15.906651Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.906699Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.906729Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:16.430319Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:3886:3403]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:16.430540Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:16.430570Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, 
ReplyToActorId = [2:3886:3403], StatRequests.size() = 1 2026-01-07T17:52:17.467117Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:3917:3415]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.467411Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:17.467460Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:3917:3415], StatRequests.size() = 1 2026-01-07T17:52:18.111152Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:18.111449Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-07T17:52:18.111625Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:18.111690Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:18.155235Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:18.155315Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:18.155522Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:52:18.184128Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:18.728518Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:3946:3427]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:18.728791Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:18.728833Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:3946:3427], StatRequests.size() = 1 2026-01-07T17:52:19.786268Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:3975:3439]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:19.786622Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:19.786668Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:3975:3439], StatRequests.size() = 1 2026-01-07T17:52:20.838871Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4004:3449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:20.839242Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:20.839292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4004:3449], StatRequests.size() = 1 2026-01-07T17:52:21.378125Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:21.940565Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4037:3461]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.940916Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:21.940985Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4037:3461], StatRequests.size() = 1 2026-01-07T17:52:22.964585Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4066:3473]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.964945Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-07T17:52:22.964992Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4066:3473], StatRequests.size() = 1 2026-01-07T17:52:23.471818Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:23.472007Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-07T17:52:23.472192Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:23.472386Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:23.538822Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:23.538899Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:23.544307Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:52:23.557997Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:24.032057Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4097:3487]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:24.032330Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-07T17:52:24.032366Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4097:3487], StatRequests.size() = 1 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::Aggregation-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29795, MsgBus: 22787 2026-01-07T17:51:51.268880Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678884192615842:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:51.268945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:51.507846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-07T17:51:51.553010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.553163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.584643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.625091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:51.625997Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678884192615805:2081] 1767808311265934 != 1767808311265937 2026-01-07T17:51:51.752313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:51.821722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:51.821809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:51.821815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:51.821888Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:52.275967Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:52.373985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:53.483248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678892782551284:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678892782551293:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.483603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.877937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:53.975309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678892782551387:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.975374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.975530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678892782551389:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.975560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.990838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:54.038232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678897077518776:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.038328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.038361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678897077518781:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.038498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678897077518783:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.038534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:54.040845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:54.048055Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678897077518785:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-07T17:51:54.159188Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678897077518836:2655] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29871, MsgBus: 19700 2026-01-07T17:51:55.200904Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678898909929899:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:55.200962Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:55.209651Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:55.257981Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:55.260073Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592678898909929853:2081] 1767808315200148 != 1767808315200151 2026-01-07T17:51:55.309151Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:55.309183Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:55.309189Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:55.309286Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:55.312316Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:55.312375Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:55.313500Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:55.509297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:55.571113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:56.205052Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:57.122258Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678907499865321:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.122486Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Se ... lt=not_found; 2026-01-07T17:52:04.009346Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678940920438673:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.009443Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.009490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678940920438678:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.009532Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678940920438680:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.009573Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:04.011968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:04.020855Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678940920438682:3121], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:52:04.091058Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678940920438735:5247] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.222884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:52:13.222920Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 28424, MsgBus: 21480 2026-01-07T17:52:17.024362Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592678994262124931:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:17.024806Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:17.040306Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:17.115260Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592678994262124811:2081] 1767808337021875 != 1767808337021878 2026-01-07T17:52:17.115301Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:17.148153Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:17.148226Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:17.151398Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:17.200634Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:17.200665Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:17.200672Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:17.200776Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:17.293044Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:17.602576Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:18.029096Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:20.706331Z node 4 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027586:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.706423Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.706732Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027596:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.706782Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.726700Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.857249Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027690:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.857342Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.857768Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027693:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.857809Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.878927Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.943832Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027769:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.943963Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.944183Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027774:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.944306Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679007147027775:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.944521Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.947736Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:20.960626Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679007147027778:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-07T17:52:21.041821Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679011441995127:2647] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:22.023661Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592678994262124931:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:22.023746Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] >> KqpRboPg::AliasesRenames [GOOD] >> KqpRboPg::Bench_10Joins >> KqpFail::Immediate [GOOD] >> KqpFail::OnCommit |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpdateOn >> KqpInplaceUpdate::BigRow [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::LeftJoins [GOOD] Test command err: Trying to start YDB, gRPC: 1974, MsgBus: 16604 2026-01-07T17:51:51.269059Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678881755697899:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:51.269120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:51.502550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:51.538304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.538466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.556219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.618909Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:51.619869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678881755697862:2081] 1767808311265845 != 1767808311265848 2026-01-07T17:51:51.699328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:51.822023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:51.822055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:51.822060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:51.822143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:52.275304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:52.322230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:53.494140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678890345633337:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.494239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.494475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678890345633347:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.494532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:53.895630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:51:54.299299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:54.299477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:54.299637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:54.299712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:54.299814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:54.299894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:54.299963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:54.300053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:54.300126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:54.300193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:54.300253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-07T17:51:54.300323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:54.300410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7592678894640601213:2335];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:54.306115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:54.306254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:54.306487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:54.306621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:54.306770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:54.306907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:54.307068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:54.307189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:54.307349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:54.307472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:54.307626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:54.307744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:54.307871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7592678894640601198:2328];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:54.328626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7592678894640601230:2342];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event= ... X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038099;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.850901Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038099;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.850920Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038099;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.851286Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.851358Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.851382Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.858900Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038105;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.858989Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038105;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.859012Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038105;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.859535Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 
2026-01-07T17:52:17.859596Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.859612Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.866591Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.866660Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.866677Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.868179Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.868241Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.868260Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.873524Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038117;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.873603Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038117;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.873625Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038117;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.876611Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.876678Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.876696Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.880043Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.880135Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.880156Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.885209Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.885275Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.885292Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.886270Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.886347Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.886372Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.893244Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038127;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.893309Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038127;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:17.893327Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038127;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-07T17:52:18.029204Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678998706310716:3699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.029330Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.029457Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678998706310721:3702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.029550Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592678998706310723:3703], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.029592Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:18.033870Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:18.050734Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592678998706310725:3704], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:52:18.152047Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592678998706310776:8021] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:20.357720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:52:20.357754Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:44.962579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:45.056154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:45.061830Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:45.062130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:45.062228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:45.381225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:45.459318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:45.459420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:45.493429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:45.558894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:46.194071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:46.195170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:46.195214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:46.195246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:46.195440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:46.260266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:46.767553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:48.907145Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.911873Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:48.914433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.938516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:48.938603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.975537Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:48.976824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.143103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.143204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.144693Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.145170Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.145754Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.146742Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.146888Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.147193Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.147310Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.147449Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.147637Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.162109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.311801Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.332399Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:49.332461Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:49.360213Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:49.361214Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:49.361367Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:49.361438Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:49.361486Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:49.361532Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:49.361568Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:49.361607Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:49.362034Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:49.363464Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:49.365242Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:51:49.372659Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:49.372702Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:49.372758Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:51:49.401674Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.401761Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.421320Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:51:49.421593Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:49.423680Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:49.449179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:49.459655Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:49.459744Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:49.468953Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:49.614124Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:49.631842Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:51:49.901413Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:50.029456Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:50.029548Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:50.739813Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ce_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:52:11.106380Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:5298:4238], StatRequests.size() = 1 2026-01-07T17:52:11.988087Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:5329:4251]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:11.988508Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:52:11.988553Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:5329:4251], StatRequests.size() = 1 2026-01-07T17:52:12.320535Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:52:12.320665Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-07T17:52:12.320793Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:12.321027Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:12.799277Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:5358:4263]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:12.799659Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:52:12.799709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:5358:4263], StatRequests.size() = 1 2026-01-07T17:52:13.911230Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:5397:4282]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.911435Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:13.911465Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:5397:4282], StatRequests.size() = 1 2026-01-07T17:52:14.967892Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:5434:4297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.968360Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:14.968427Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:5434:4297], StatRequests.size() = 1 2026-01-07T17:52:15.516010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:15.740568Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:15.740656Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.740698Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:15.740739Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:16.415983Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:5474:4316]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:16.416299Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:16.416343Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:5474:4316], StatRequests.size() = 1 2026-01-07T17:52:17.548708Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:5505:4328]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.549058Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:17.549093Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:5505:4328], StatRequests.size() = 1 2026-01-07T17:52:18.177482Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:52:18.177940Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-07T17:52:18.178241Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:18.178304Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:18.211970Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:18.212061Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:18.212410Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:18.227106Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:18.926976Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: 
[TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:5536:4342]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:18.927282Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:18.927323Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:5536:4342], StatRequests.size() = 1 2026-01-07T17:52:20.011965Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:52:20.012034Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.893000s, at schemeshard: 72075186224037899 2026-01-07T17:52:20.012336Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:20.026587Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:20.139254Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:5573:4362]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:20.139602Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:20.139651Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:5573:4362], StatRequests.size() = 1 2026-01-07T17:52:21.167027Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:5604:4374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.167455Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:21.167516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5604:4374], StatRequests.size() = 1 2026-01-07T17:52:21.635219Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-07T17:52:21.635294Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.633000s, at schemeshard: 72075186224037905 2026-01-07T17:52:21.635485Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:21.650121Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:21.720801Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:22.442282Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5647:4396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.442678Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:22.442727Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5647:4396], StatRequests.size() = 1 2026-01-07T17:52:23.609694Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 
], ReplyToActorId[ [2:5676:4408]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.610193Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:23.610242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5676:4408], StatRequests.size() = 1 2026-01-07T17:52:24.035911Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:52:24.036473Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-07T17:52:24.036843Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:24.036951Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:24.070384Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:24.070489Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:24.070742Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:24.084873Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:24.703879Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5709:4424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:24.704192Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:24.704235Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5709:4424], StatRequests.size() = 1 2026-01-07T17:52:24.704710Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5711:4426]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:24.708353Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:24.708436Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5711:4426], StatRequests.size() = 1 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14546, MsgBus: 8306 2026-01-07T17:52:09.059690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961219453848:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.305230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.346687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.346764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.382429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.400454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.596089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662289Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.069296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.293046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.354024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.517996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.651083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.724097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.579028Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969809390272:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969809390281:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.058208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.082809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.104859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.127470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.151424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.176672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.206573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.243208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.315009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974104358449:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.315072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.315087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974104358454:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.315260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974104358456:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.315334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.318280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.326673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678974104358457:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.429364Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678974104358509:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25317, MsgBus: 2065 2026-01-07T17:52:14.622901Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678983743485142:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.623034Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.634354Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.701833Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:14.702850Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for ... 7T17:52:20.926912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.985227Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.133334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.197241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.270086Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:23.417878Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679022770801623:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.417994Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.418275Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679022770801633:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.418348Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.480780Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.509047Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.535923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.566985Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.596659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.627377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.657635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.733716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.798206Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679022770802502:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.798292Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.798403Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679022770802507:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.798533Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679022770802509:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.798582Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.802114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.813382Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679022770802511:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.888957Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679022770802562:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.211083Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679009885897895:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.211130Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:25.229272Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:52:25.229491Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:52:25.229619Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:52:25.229786Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7592679031360737468:2529], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [3:7592679031360737444:2529]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[3:7592679031360737468:2529].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:52:25.229888Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592679031360737462:2529], SessionActorId: [3:7592679031360737444:2529], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7592679031360737444:2529]. 2026-01-07T17:52:25.230157Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=NzVmODkxMzUtNGM0ZTk0ZGItNDI3Mjk0MjYtYzAzMzZiMQ==, ActorId: [3:7592679031360737444:2529], ActorState: ExecuteState, LegacyTraceId: 01kecsd90fcj1g8nkeck9594j9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7592679031360737463:2529] from: [3:7592679031360737462:2529] trace_id# 2026-01-07T17:52:25.230290Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592679031360737463:2529] TxId: 281474976710673. Ctx: { TraceId: 01kecsd90fcj1g8nkeck9594j9, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzVmODkxMzUtNGM0ZTk0ZGItNDI3Mjk0MjYtYzAzMzZiMQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:52:25.230541Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NzVmODkxMzUtNGM0ZTk0ZGItNDI3Mjk0MjYtYzAzMzZiMQ==, ActorId: [3:7592679031360737444:2529], ActorState: ExecuteState, LegacyTraceId: 01kecsd90fcj1g8nkeck9594j9, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TwoShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27187, MsgBus: 30869 2026-01-07T17:52:09.059633Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961409529517:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.341751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.341839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.365575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.366965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.391576Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.634458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662302Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.069204Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.310737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.353604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.476434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.596764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.652892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.755581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969999465936:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.755675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.755904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969999465946:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.755934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.058368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.083617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.107327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.133986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.156996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.185423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.214937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.254139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.331290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974294434113:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.331360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.331369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974294434118:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.331508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974294434120:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.331550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.334996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.343911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678974294434121:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.446569Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678974294434173:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.550309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 14979, MsgBus: 14937 2026-01-07T17:52:14.530538Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678980877027903:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.530612Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.542242Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [ ... : will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.362248Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.362348Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.478823Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:20.798299Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:20.804551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:20.814322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.883418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.053087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.114128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.226960Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:23.408489Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679020018808349:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.408586Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.408869Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679020018808358:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.408921Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.482427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.511828Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.542520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.571984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.603401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.635214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.702563Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.742481Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.807469Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679020018809226:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.807531Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679020018809231:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.807567Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.807744Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679020018809234:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.807794Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.810898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.819839Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679020018809233:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.874731Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679020018809286:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.172680Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679007133904626:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.172751Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:25.348373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.533405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:52:25.554117Z node 3 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037927 cannot parse tx 281474976710675: Table '/Root/TestTable' scheme changed. 2026-01-07T17:52:25.554275Z node 3 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [3:7592679028608744273:2520] TxId: 281474976710675. Ctx: { TraceId: 01kecsd9efcxm9sfcyrapqgwaf, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJkOGE5YTktNmI5NTZkMy03MjE5NjZiOC1jYWM0YjY0ZQ==, PoolId: default, IsStreamingQuery: 0}. ERROR: [SCHEME_CHANGED] Table '/Root/TestTable' scheme changed.; trace_id# 2026-01-07T17:52:25.554658Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZjJkOGE5YTktNmI5NTZkMy03MjE5NjZiOC1jYWM0YjY0ZQ==, ActorId: [3:7592679028608744139:2520], ActorState: ExecuteState, LegacyTraceId: 01kecsd9efcxm9sfcyrapqgwaf, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/TestTable\' scheme changed." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 19704, MsgBus: 2863 2026-01-07T17:52:13.899233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678975964842631:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:13.899294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.059050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.104561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:14.104683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:14.138968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:14.142842Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:14.143835Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678975964842589:2081] 1767808333898296 != 1767808333898299 2026-01-07T17:52:14.221548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:14.221569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:14.221593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:14.221691Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:14.346840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:14.547550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:14.597036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.727883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.867740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.911499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:14.935515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.675677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678988849746347:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.675770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.676025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678988849746357:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.676073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.927255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.957498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.991029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.019222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.048347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.085440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.115971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.158236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.221184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993144714524:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.221259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.221317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993144714529:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.221483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993144714531:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.221539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.224843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:17.236883Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678993144714532:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:17.299505Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678993144714584:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:18.899721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678975964842631:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:18.899778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:18.904114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 4923, MsgBus: 11063 2026-01-07T17:52:20.283539Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679007166344766:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:20.283605Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:20.353048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.372135Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.389302Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.389392Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.399276Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.492706Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.492720Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.492728Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.492812Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.580580Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:20.908647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:20.923013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.979834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.104563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.171511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.290828Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:23.464777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679020051248468:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.464866Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.465297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679020051248478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.465334Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.524228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.551191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.579447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.607530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.633703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.690428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.717848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.757703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.827165Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679020051249344:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.827247Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.827248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679020051249349:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.827398Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679020051249351:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.827444Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.829495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.836263Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679020051249352:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.928376Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679020051249404:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.284989Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679007166344766:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.285616Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:25.330036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> BasicStatistics::TwoNodes [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> BasicStatistics::SimpleGlobalIndex [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31993, MsgBus: 3113 2026-01-07T17:52:09.059751Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678959212775026:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.316213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.394000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.394126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.397030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.434607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.587482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662329Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.071770Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.313335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.323003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:10.379934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.515384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.646538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.705000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.835350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678967802711450:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.835446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.835732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678967802711460:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.835778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.217185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.240988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.266105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.292737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.317705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.347518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.375929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.416279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.501609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972097679627:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.501732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.501816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972097679632:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.501936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678972097679634:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.501967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.505075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.513090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678972097679635:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.570483Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678972097679687:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.786366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:13.984266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:52:14.052768Z node 1 :KQP_COMPUTE E ... re not loaded 2026-01-07T17:52:20.560441Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.560520Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.567028Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.675182Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.675208Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.675215Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.675306Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.717206Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:21.084425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:21.091079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:21.103063Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.157003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.297264Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.391439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.514216Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:23.943384Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021912022595:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.943468Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.943688Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021912022605:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.943737Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.996183Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.025295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.051741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.079441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.160603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.189142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.222651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.264883Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:24.342996Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679026206990775:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:24.343087Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:24.343194Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679026206990780:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:24.343243Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679026206990782:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:24.343281Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:24.346951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:24.359286Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679026206990784:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:24.455974Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679026206990835:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.447522Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679009027118978:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.447573Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:26.338888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.605541Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-07T17:52:26.812573Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDRmODhkZmEtYjhjZDc5YjYtODFkZDI0NTktYmVmZTcxNjc=, ActorId: [3:7592679034796925701:2521], ActorState: ExecuteState, LegacyTraceId: 01kecsdag51gs5rw3p10ka52wv, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Tables: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpEffects::EffectWithSelect-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:46.682662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:46.750942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:46.756982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:46.757157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:46.757245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:47.075696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:47.164101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:47.164201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:47.210095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:47.309628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:47.908652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:47.909262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:47.909306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:47.909356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:47.909812Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:47.976263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:48.537040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:52.892918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:52.893153Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:52.893185Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-07T17:51:52.901908Z node 2 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:52.907589Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:52.948872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:52.949000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:52.949562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:52.949618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:52.987586Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:52.987730Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-07T17:51:52.989594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:52.990284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.145355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:53.145444Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.146278Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:53.146337Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.146916Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.147616Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.148238Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.148511Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.148700Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.148850Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.149003Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.149126Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.149203Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.163912Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:53.164257Z node 3 :HIVE WARN: 
node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.164518Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.322916Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:53.337749Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:53.337825Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:53.361600Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:53.362278Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:53.362446Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:53.362493Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:53.362528Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:53.362565Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:53.362605Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:53.362645Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:53.363407Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:53.364914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2345:2411] 2026-01-07T17:51:53.372680Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:53.390025Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2486:2448] Owner: [2:2485:2447]. Describe result: PathErrorUnknown 2026-01-07T17:51:53.390098Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2486:2448] Owner: [2:2485:2447]. Creating table 2026-01-07T17:51:53.390161Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2486:2448] Owner: [2:2485:2447]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:53.393179Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.393248Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2549:2457], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.417468Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2598:2494] 2026-01-07T17:51:53.417680Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2598:2494], schemeshard id = 72075186224037897 2026-01-07T17:51:53.432108Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2632:2500], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:53.457855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:53.499331Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2486:2448] Owner: [2:2485:2447]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:53.499458Z node 2 ... 
] RequestId[ 23 ], ReplyToActorId[ [2:4312:2991]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.387331Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:14.387381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4312:2991], StatRequests.size() = 1 2026-01-07T17:52:15.189915Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4343:2997]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.190249Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:15.190289Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4343:2997], StatRequests.size() = 1 2026-01-07T17:52:15.562182Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:15.562645Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-07T17:52:15.562901Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:15.562958Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:15.994097Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4374:3003]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.994393Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:15.994438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4374:3003], StatRequests.size() = 1 2026-01-07T17:52:16.915064Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4409:3011]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:16.915370Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:16.915410Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4409:3011], StatRequests.size() = 1 2026-01-07T17:52:17.932998Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4442:3017]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.933325Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:17.933359Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4442:3017], StatRequests.size() = 1 2026-01-07T17:52:18.585867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:18.810959Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:18.811044Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:18.811075Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:18.811118Z node 1 
:STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:19.359307Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4485:3026]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:19.359590Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:19.359621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4485:3026], StatRequests.size() = 1 2026-01-07T17:52:20.463362Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4520:3032]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:20.463776Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:20.463830Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4520:3032], StatRequests.size() = 1 2026-01-07T17:52:21.174417Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:21.174679Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:52:21.174809Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:21.174989Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:21.265056Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:21.265155Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:21.265418Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:52:21.279049Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:21.914988Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4553:3040]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.915310Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:21.915349Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4553:3040], StatRequests.size() = 1 2026-01-07T17:52:23.023576Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4586:3048]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.023986Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:23.024037Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4586:3048], StatRequests.size() = 1 2026-01-07T17:52:24.043313Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4619:3054]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-07T17:52:24.043564Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:24.043590Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4619:3054], StatRequests.size() = 1 2026-01-07T17:52:24.478339Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:24.903386Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4654:3062]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:24.903729Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:24.903771Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4654:3062], StatRequests.size() = 1 2026-01-07T17:52:25.741091Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4689:3068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:25.741508Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-07T17:52:25.741558Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4689:3068], StatRequests.size() = 1 2026-01-07T17:52:26.270281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:26.270777Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-07T17:52:26.271125Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:26.271177Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:26.315482Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:26.315550Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:26.315742Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-07T17:52:26.329463Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:26.843882Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4724:3076]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:26.844194Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-07T17:52:26.844242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4724:3076], StatRequests.size() = 1 2026-01-07T17:52:26.844671Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:4726:2967]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:26.848175Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:52:26.848298Z node 3 :STATISTICS DEBUG: service_impl.cpp:1244: 
ConnectToSA(), pipe client id = [3:4736:2971] 2026-01-07T17:52:26.848359Z node 3 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [3:4736:2971] 2026-01-07T17:52:26.853012Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4744:3078] 2026-01-07T17:52:26.853829Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:4736:2971], server id = [2:4744:3078], tablet id = 72075186224037894, status = OK 2026-01-07T17:52:26.854135Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4744:3078], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:52:26.854202Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2026-01-07T17:52:26.854484Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 18446744073709551615 2026-01-07T17:52:26.854590Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [3:4726:2967], StatRequests.size() = 1 2026-01-07T17:52:26.854877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 8036, MsgBus: 19723 2026-01-07T17:52:09.641994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961568785217:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.642053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.855070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.855159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.881780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.899897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.903538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.904634Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678961568785184:2081] 1767808329640809 != 1767808329640812 2026-01-07T17:52:09.982501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.982535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.982542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:52:09.982679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.064114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:10.360260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.426951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.533935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.649606Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.649840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.703358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.111527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974453688948:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.111670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.112063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974453688958:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.112153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.388825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.415136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.437740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.462831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.488698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.518250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.548369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.591526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.675220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974453689827:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.675289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.675319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974453689832:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.675493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678974453689834:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.675533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.678763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.688611Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678974453689835:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:12.747397Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678974453689887:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:13.973691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.642344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678961568785217:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.642402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... LASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:22.039803Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:22.054365Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:22.430436Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:22.439424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.497951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.631106Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.726365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.882316Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:25.052435Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679028746253369:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.052562Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.052798Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679028746253378:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.052852Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.121696Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.148569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.174765Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.202265Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.230272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.259789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.316505Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.362202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.433228Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679028746254245:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.433330Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.433363Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679028746254250:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.433513Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679028746254252:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.433546Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.437225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:25.447524Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679028746254253:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:25.535903Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679028746254305:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:26.868365Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679011566382357:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:26.868432Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:27.013521Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.588919Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7592679037336189454:2532], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01kecsdbb19swn33q0hytkp9me. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=OWFmZWQxOTEtNjY5ZjQ3MS0yNmUwN2FhNy1mOGY0OGI5MQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-07T17:52:27.591797Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592679037336189454:2532], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01kecsdbb19swn33q0hytkp9me. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=OWFmZWQxOTEtNjY5ZjQ3MS0yNmUwN2FhNy1mOGY0OGI5MQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-07T17:52:27.592572Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=OWFmZWQxOTEtNjY5ZjQ3MS0yNmUwN2FhNy1mOGY0OGI5MQ==, ActorId: [3:7592679033041221895:2532], ActorState: ExecuteState, LegacyTraceId: 01kecsdbb19swn33q0hytkp9me, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9336, MsgBus: 30033 2026-01-07T17:52:09.059736Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678960306095604:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.356153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.356214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.357222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.377484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.389044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.590446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.068747Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.280263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.298555Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:10.353662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.502243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.641509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.695741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.693105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678968896032030:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.693196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.693450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678968896032040:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.693590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.058250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.082556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.109063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.135687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.162462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.190423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.216299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.256865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.348382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973191000210:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.348446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.348461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973191000215:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.348598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973191000217:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.348628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.352739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.364068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678973191000218:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.458531Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678973191000270:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20459, MsgBus: 19965 2026-01-07T17:52:14.591115Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678980691534439:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.591174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.604325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.673256Z node 2 :IMPORT ... pp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.482774Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.483733Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592679008744645427:2081] 1767808340374152 != 1767808340374155 2026-01-07T17:52:20.527216Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.527291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.532863Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.649065Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.649090Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.649099Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.649197Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.771654Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:21.165204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:21.171029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-07T17:52:21.178535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.244404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.374056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.401894Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:21.438781Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.414608Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021629549189:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.414699Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.414978Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021629549199:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.415051Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.529025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.559973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.592507Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.628850Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.662436Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.689648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.720726Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.764295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.833030Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021629550068:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.833106Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021629550073:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.833108Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.833232Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679021629550075:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.833272Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.836453Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.844686Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679021629550076:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.905975Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679021629550128:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.375484Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679008744645454:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.375545Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:25.777312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.831575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:49.188099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:49.255008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:49.260120Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:49.260355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:49.260482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:49.538075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.617931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.618020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.651799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.725811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:50.312890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:50.313749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:50.313789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:50.313816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:50.314237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:50.377938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:50.907873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:53.393003Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:53.398361Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:53.400967Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:53.422017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:53.422104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.461336Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:53.463048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.614966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:53.615095Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.616077Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.616556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.616990Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.617515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.617824Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.617928Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.618074Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.618211Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.618299Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.633027Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.801338Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:53.832867Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:53.832955Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:53.882465Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:53.883386Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:53.883558Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:53.883596Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:53.883634Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:53.883674Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:53.883737Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:53.883792Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:53.884444Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:53.893916Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.893986Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.920966Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:51:53.921749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:51:53.979834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:51:53.981387Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:53.993010Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:51:53.993058Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:51:53.993118Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:53.997449Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:54.000224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:54.004941Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:54.005030Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:54.013394Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:54.183221Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:54.195281Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:54.439840Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:54.581740Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:54.581807Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:51:55.353995Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-07T17:52:12.958145Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3838:3392]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:12.958353Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:52:12.958381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3838:3392], StatRequests.size() = 1 2026-01-07T17:52:13.817730Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3875:3408]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.818064Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:52:13.818109Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3875:3408], StatRequests.size() = 1 2026-01-07T17:52:14.621214Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3904:3420]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.621487Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:52:14.621526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3904:3420], StatRequests.size() = 1 2026-01-07T17:52:15.076376Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:15.502791Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3937:3434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.503065Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:15.503106Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3937:3434], StatRequests.size() = 1 2026-01-07T17:52:16.260424Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3964:3444]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-07T17:52:16.260716Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:16.260757Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3964:3444], StatRequests.size() = 1 2026-01-07T17:52:16.657229Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:16.657371Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-07T17:52:16.657769Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:16.657927Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:17.071681Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3993:3456]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.072041Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:17.072092Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3993:3456], StatRequests.size() = 1 2026-01-07T17:52:17.879197Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4022:3466]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.879528Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:17.879578Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4022:3466], StatRequests.size() = 1 2026-01-07T17:52:18.775330Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4053:3476]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:18.775662Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:18.775706Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4053:3476], StatRequests.size() = 1 2026-01-07T17:52:19.378265Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:19.522814Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:19.522892Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:19.522934Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:19.522975Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:20.078012Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4089:3491]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:20.078413Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:20.078475Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, 
ReplyToActorId = [2:4089:3491], StatRequests.size() = 1 2026-01-07T17:52:21.223594Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4122:3503]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.223864Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:21.223908Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4122:3503], StatRequests.size() = 1 2026-01-07T17:52:21.966846Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:21.967116Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:52:21.967551Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:21.967608Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:21.978905Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:21.978990Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:21.979243Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-07T17:52:21.993257Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:22.600529Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4153:3517]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.600847Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:22.600890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4153:3517], StatRequests.size() = 1 2026-01-07T17:52:23.735495Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4182:3529]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.735775Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:23.735813Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4182:3529], StatRequests.size() = 1 2026-01-07T17:52:24.718913Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4211:3539]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:24.719201Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:24.719242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4211:3539], StatRequests.size() = 1 2026-01-07T17:52:25.248095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:25.784327Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4244:3551]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:25.784570Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:25.784600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4244:3551], StatRequests.size() = 1 2026-01-07T17:52:26.809940Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4273:3563]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:26.810232Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-07T17:52:26.810278Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4273:3563], StatRequests.size() = 1 2026-01-07T17:52:27.365193Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:27.365720Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-07T17:52:27.366161Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:27.366249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:27.377223Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:27.377305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:27.377528Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-07T17:52:27.390894Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:27.888865Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4306:3577]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:27.889200Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-07T17:52:27.889247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4306:3577], StatRequests.size() = 1 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9178, MsgBus: 29624 2026-01-07T17:52:09.059784Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678961562831286:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.059831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.317036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.343844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.343969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.392558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.436238Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.553442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:09.662145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.662238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.662249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.662315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:10.068306Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.307210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.315974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:11.579166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970152766723:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678970152766732:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.579586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.059480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.075814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:7592678974447734073:2522], Recipient [1:7592678974447734081:2331]: NKikimr::TEvTablet::TEvBoot 2026-01-07T17:52:12.076721Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:7592678974447734073:2522], Recipient [1:7592678974447734081:2331]: NKikimr::TEvTablet::TEvRestored 2026-01-07T17:52:12.076998Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7592678974447734081:2331] 2026-01-07T17:52:12.077216Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:52:12.086536Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:7592678974447734073:2522], Recipient [1:7592678974447734081:2331]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T17:52:12.086636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:52:12.086675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:52:12.088441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:52:12.088537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:52:12.088578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:52:12.088997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:52:12.089054Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:52:12.089103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7592678974447734096:2331] in generation 1 2026-01-07T17:52:12.090395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:52:12.127674Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:52:12.127845Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:52:12.127899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7592678974447734098:2332] 2026-01-07T17:52:12.127912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:52:12.127922Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:52:12.127955Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:12.128060Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:7592678974447734081:2331], Recipient [1:7592678974447734081:2331]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T17:52:12.128085Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T17:52:12.128195Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:52:12.128250Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:52:12.128307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:7592678974447734082:2526], Recipient [1:7592678974447734081:2331]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T17:52:12.128326Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T17:52:12.128350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592678974447734071:2521], serverId# [1:7592678974447734082:2526], sessionId# [0:0:0] 2026-01-07T17:52:12.128380Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:7592678961562831610:2150], Recipient [1:7592678974447734082:2526] 2026-01-07T17:52:12.128414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T17:52:12.128495Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:12.128514Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:52:12.128568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T17:52:12.128593Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T17:52:12.128600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T17:52:12.128607Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:52:12.128623Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:12.128641Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:52:12.128833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710658] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T17:52:12.128864Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:52:12.128939Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2026-01-07T17:52:12.128979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710658] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T17:52:12.128992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for 
[0:281474976710658] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T17:52:12.129001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710658] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T17:52:12.129035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710658] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T17:52:12.129297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710658] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T17:52:12.129332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710658] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T17:52:12.129341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710658] at 72075186224037888 to execution unit Finish ... vent TEvPipeCache::TEvDeliveryProblem 2026-01-07T17:52:16.986257Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset Trying to start YDB, gRPC: 9816, MsgBus: 11614 2026-01-07T17:52:17.703679Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592678993373202167:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:17.703812Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:17.715171Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:17.792302Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:17.832307Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:17.832393Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:17.836810Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:17.869897Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:17.869913Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:17.869917Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:17.870009Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:17.914823Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:18.199255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:18.208590Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.256467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.382360Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.462860Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.760081Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:20.473027Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679006258105876:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.473114Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.473924Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679006258105886:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.473962Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.577089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.609859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.648015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.686702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.754537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.786235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.821524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.888304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.973279Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679006258106765:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.973407Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.973776Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679006258106770:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.973824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679006258106771:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.973835Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:20.977735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:20.989499Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679006258106774:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:21.052601Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679010553074121:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:22.709088Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592678993373202167:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:22.709167Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:22.726429Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:47.444163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:47.514162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:47.519952Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:47.520194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:47.520285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:47.803144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:47.895093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:47.895201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:47.929147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:48.007947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:48.653633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:48.654343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:48.654380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:48.654402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:48.654699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:48.716161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:49.219780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:51.638705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:51.645463Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:51.648385Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:51.672569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:51.672667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.711650Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:51.713460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.891262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.891376Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.892689Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.893472Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.894035Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.894947Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.895551Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.895750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.895945Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.896211Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.896371Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.911831Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:52.068294Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:52.103279Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:52.103356Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:52.130098Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:52.131000Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:52.131162Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:52.131199Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:52.131252Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:52.131291Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:52.131323Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:52.131355Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:52.131966Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:52.169073Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:52.169159Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:52.190596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:51:52.191232Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:51:52.238919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:51:52.240101Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:51:52.251810Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:51:52.251865Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:51:52.251930Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:51:52.256639Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:52.259537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:52.269487Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:52.269608Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:52.279154Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:52.454341Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:52.471187Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:51:52.718202Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:52.858914Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:52.858978Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:51:53.581015Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... atType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:13.253484Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-07T17:52:13.253512Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [2:4355:3715], StatRequests.size() = 1 2026-01-07T17:52:14.204761Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:4387:3726]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:14.205117Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-07T17:52:14.205161Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4387:3726], StatRequests.size() = 1 2026-01-07T17:52:15.185910Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:15.208437Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4419:3741]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:15.208774Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-07T17:52:15.208818Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4419:3741], StatRequests.size() = 1 2026-01-07T17:52:16.189374Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4451:3756]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:16.189742Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-07T17:52:16.189793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4451:3756], StatRequests.size() = 1 2026-01-07T17:52:17.088804Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:17.089214Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-07T17:52:17.089799Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 
cookie: 0 2026-01-07T17:52:17.089911Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:52:17.111815Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4479:3767]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:17.112198Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-07T17:52:17.112237Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4479:3767], StatRequests.size() = 1 2026-01-07T17:52:18.087788Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4511:3782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:18.088140Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-07T17:52:18.088186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4511:3782], StatRequests.size() = 1 2026-01-07T17:52:19.024560Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4541:3793]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:19.025042Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-07T17:52:19.025091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4541:3793], StatRequests.size() = 1 2026-01-07T17:52:20.141609Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:20.273965Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4584:3810]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:20.274352Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-07T17:52:20.274398Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4584:3810], StatRequests.size() = 1 2026-01-07T17:52:20.298555Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:20.298629Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:20.298665Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:20.298694Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:21.511885Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4617:3824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:21.512248Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-07T17:52:21.512298Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4617:3824], StatRequests.size() = 1 2026-01-07T17:52:22.727253Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-07T17:52:22.727422Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:52:22.728026Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:22.728248Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:52:22.740117Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:22.740219Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:22.740660Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:22.755434Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:22.810305Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4648:3837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.810917Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-07T17:52:22.810974Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4648:3837], StatRequests.size() = 1 2026-01-07T17:52:23.975045Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4682:3853]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.975396Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:23.975439Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4682:3853], StatRequests.size() = 1 2026-01-07T17:52:24.524187Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:52:24.524268Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.743000s, at schemeshard: 72075186224037899 2026-01-07T17:52:24.524478Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:24.539055Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:25.147711Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4712:3866]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:25.148131Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:25.148180Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4712:3866], StatRequests.size() = 1 2026-01-07T17:52:26.303565Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:26.336594Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4746:3880]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:26.336867Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:26.336900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4746:3880], StatRequests.size() = 1 2026-01-07T17:52:27.496710Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4784:3896]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:27.497021Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:27.497064Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4784:3896], StatRequests.size() = 1 2026-01-07T17:52:28.495961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-07T17:52:28.496082Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-07T17:52:28.496488Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:52:28.496634Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:28.507373Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:28.507430Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:28.507606Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:28.520582Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:28.542605Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4814:3909]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:28.542992Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:28.543056Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4814:3909], StatRequests.size() = 1 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::ManyFlushes [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID >> TNodeBrokerTest::NodesMigrationSetLocation >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted >> TNodeBrokerTest::SyncNodes >> TNodeBrokerTest::NodesMigrationExtendLease >> TSlotIndexesPoolTest::Basic [GOOD] >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> 
KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18308, MsgBus: 15675 2026-01-07T17:52:20.212993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679007460387690:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:20.213046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:20.442646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.505170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.505302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.574895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.594361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.620997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:20.705480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.705509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.705533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.705603Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:21.106572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:21.171027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.228370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:21.312078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.457025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.511505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.005678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020345291410:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.005771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.006012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020345291420:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.006043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.345866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.377598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.407819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.435190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.463373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.494794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.526635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.569371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.644057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020345292292:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.644110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020345292297:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.644148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.644634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020345292300:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.644686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.647268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.656684Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679020345292299:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:23.743486Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679020345292352:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.071828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.213905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679007460387690:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.213973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9772, MsgBus: 7796 2026-01-07T17:52:26.254963Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679033769338968:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:26.255019Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:26.281784Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:26.318639Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:26.367122Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:26.367200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:26.379954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:26.401282Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:26.401312Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:26.401319Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:26.401396Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:26.476492Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:26.765774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:26.771080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:26.783544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.840464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.004769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.066059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.261367Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:28.870238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679042359275390:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.870320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.870516Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679042359275400:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.870558Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.925860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.954631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.987623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.017240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.046946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.075747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.105946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.147790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.203170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679046654243566:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.203265Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.203293Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679046654243571:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.203399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679046654243573:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.203434Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.206262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:29.213975Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679046654243574:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:29.295451Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679046654243628:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:30.504442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.255466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679033769338968:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:31.255574Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove >> TNodeBrokerTest::NodesMigrationNodeName >> TNodeBrokerTest::Test1001NodesSubscribers >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::NodesMigrationReuseRemovedID >> TNodeBrokerTest::NodesMigrationNewExpiredNode >> TNodeBrokerTest::Test1000NodesSubscribers >> TNodeBrokerTest::NodesMigrationRemoveActive >> TNodeBrokerTest::UpdateNodesLog >> TLocalTests::TestAlterTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14285, MsgBus: 12958 2026-01-07T17:52:13.474852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678976277537575:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:13.474988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:13.615397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:13.650142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:13.650216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:13.685285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:13.691498Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:13.703089Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678976277537537:2081] 1767808333473725 != 1767808333473728 2026-01-07T17:52:13.785209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:13.786340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:13.786354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:13.786376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:13.786472Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:14.133884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:14.173779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.255127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.349334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.409395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.492129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:16.019349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989162441300:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.019452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.019685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989162441310:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.019713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.244469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.273400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.298496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.325021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.353102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.382625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.413093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.456024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:16.525966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989162442181:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.526039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.526111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989162442186:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.526228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989162442188:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.526270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.529486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:16.539232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678989162442190:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:16.597435Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678989162442241:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:18.475077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678976277537575:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:18.475146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3406, MsgBus: 6057 2026-01-07T17:52:18.980432Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678999375948749:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:18.980467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sna ... ervice] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:24.950178Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:24.950198Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:24.950207Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:24.950294Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:25.400722Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:25.409380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.463272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.577873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.636456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.793476Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:28.159954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679042116758468:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.160066Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.160364Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679042116758478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.160429Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.231110Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.259236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.321509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.351259Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.380731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.412326Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.440803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.481775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.546904Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679042116759345:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.546978Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.546985Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679042116759350:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.547143Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679042116759352:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.547191Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:28.550255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:28.559995Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679042116759353:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:28.623392Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679042116759405:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:29.768324Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679024936887446:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:29.768409Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:30.130150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:30.597839Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592679050706694400:2548], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsde21fcvvhm9n5709zqdq. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjRkZjc0OGEtMzE2YzE4MzYtZDE1MGI3ZGEtNDI2ZjcyNGQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-07T17:52:30.598058Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592679050706694401:2549], TxId: 281474976710677, task: 2. Ctx: { CheckpointId : . TraceId : 01kecsde21fcvvhm9n5709zqdq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjRkZjc0OGEtMzE2YzE4MzYtZDE1MGI3ZGEtNDI2ZjcyNGQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7592679050706694397:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:52:30.598294Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZjRkZjc0OGEtMzE2YzE4MzYtZDE1MGI3ZGEtNDI2ZjcyNGQ=, ActorId: [3:7592679050706694259:2520], ActorState: ExecuteState, LegacyTraceId: 01kecsde21fcvvhm9n5709zqdq, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 25734, MsgBus: 19081 2026-01-07T17:52:19.696870Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679004423160590:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:19.697010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:19.735732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:52:19.937454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:19.937537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:19.956713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.026546Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.027822Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679004423160554:2081] 1767808339695546 != 1767808339695549 2026-01-07T17:52:20.027899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.081025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.081053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.081061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.081146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.284712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:20.428476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:20.464099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.597356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.705348Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:20.741634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:20.799557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.749754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679017308064316:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.749896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.750242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679017308064326:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.750296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.048970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.074306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.101689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.128344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.151191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.202395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.229158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.270931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.348750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679021603032494:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.348841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.348965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679021603032499:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.349247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679021603032501:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.349305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.352720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.363193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679021603032502:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:23.455690Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679021603032554:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:24.696942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679004423160590:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:24.697006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:24.878657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... :7592679054197159783:2529] TxId: 281474976710676. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:52:31.044908Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2026-01-07T17:52:31.045106Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-07T17:52:31.045295Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710677. Resolved key sets: 1 2026-01-07T17:52:31.045410Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710677. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 54] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-07T17:52:31.045648Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-07T17:52:31.045755Z node 2 :KQP_EXECUTER DEBUG: kqp_shards_resolver.cpp:76: [ShardsResolver] TxId: 281474976710677. Shard resolve complete, resolved shards: 1 2026-01-07T17:52:31.045798Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:259} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-07T17:52:31.045826Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. 
Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037927] trace_id# 2026-01-07T17:52:31.045863Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1767808350784} 2026-01-07T17:52:31.046104Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:7592679054197159794:2529] 2026-01-07T17:52:31.046143Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7592679054197159794:2529], channels: 1 2026-01-07T17:52:31.046206Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:52:31.046267Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7592679054197159794:2529], trace_id# 2026-01-07T17:52:31.046300Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7592679054197159794:2529], trace_id# 2026-01-07T17:52:31.046335Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-07T17:52:31.046862Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. 
Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7592679054197159794:2529] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-07T17:52:31.046896Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7592679054197159794:2529], trace_id# 2026-01-07T17:52:31.046947Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7592679054197159794:2529], trace_id# 2026-01-07T17:52:31.048265Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7592679054197159794:2529] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 734 Tasks { TaskId: 1 CpuTimeUs: 247 FinishTimeMs: 1767808351047 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 121 BuildCpuTimeUs: 126 HostName: "ghrun-z2c6jslqle" NodeId: 2 StartTimeMs: 1767808351047 CreateTimeMs: 1767808351046 UpdateTimeMs: 1767808351047 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-07T17:52:31.048299Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7592679054197159794:2529] 2026-01-07T17:52:31.048387Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-07T17:52:31.048418Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:7592679054197159790:2529] TxId: 281474976710677. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000734s ReadRows: 1 ReadBytes: 22 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:52:31.048760Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710678. 
Resolved key sets: 0 2026-01-07T17:52:31.048842Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710678. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1767808350784} 2026-01-07T17:52:31.048872Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:7592679054197159797:2529] TxId: 281474976710678. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-07T17:52:31.048911Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:7592679054197159797:2529] TxId: 281474976710678. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7592679054197159782:2529] trace_id# 2026-01-07T17:52:31.048961Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:7592679054197159797:2529] TxId: 281474976710678. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-07T17:52:31.050804Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1494} ActorId: [2:7592679054197159797:2529] TxId: 281474976710678. Ctx: { TraceId: 01kecsdeq87bz86jn5nwswptee, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YmJiOThkNmMtYTM5ODA2NGYtNDM1YjE0NTItMzVkMTkxYQ==, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-01-07T17:52:31.162498Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679032722320010:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:31.162589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 8153, MsgBus: 64936 2026-01-07T17:52:20.322752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679007435927743:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:20.322840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:20.704413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.704552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.709021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.721650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.731064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679007435927702:2081] 1767808340321619 != 1767808340321622 2026-01-07T17:52:20.749518Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.864514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.864539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.864546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.864683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:20.968620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:21.232799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:21.242712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:21.336457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.336762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:21.471550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.601299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.655468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.299181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020320831462:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.299295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.299567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020320831472:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.299639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.550704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.578739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.608196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.637559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.667858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.699939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.727960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.789616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.849235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020320832339:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.849320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.849475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020320832345:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.849494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679020320832346:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.849526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.852046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.861314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679020320832349:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.923130Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679020320832400:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.323179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679007435927743:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.323272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:25.347919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ing to start YDB, gRPC: 26762, MsgBus: 2913 2026-01-07T17:52:26.641114Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679034866021893:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:26.641172Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:26.660563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:26.733234Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:26.771105Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:26.771180Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:26.774314Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:26.829053Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:26.829077Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:26.829086Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:26.829157Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:26.871697Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:27.194332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:27.202416Z node 
2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:52:27.211365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.260890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.373999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.422787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.649215Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:29.499004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679047750925606:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.499099Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.499348Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679047750925615:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.499399Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.554072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.580486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.608878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.635705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.662078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.689487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.739446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.781410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.850442Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679047750926486:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.850536Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.850571Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679047750926491:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.850803Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679047750926493:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.850854Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.853545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:29.862087Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679047750926494:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:29.923724Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679047750926546:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:31.326753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.641227Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679034866021893:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:31.641280Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 13282, MsgBus: 15485 2026-01-07T17:52:13.992649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678976308566904:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:13.993016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:14.151628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:14.170917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:14.170986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:14.205144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:14.233741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:14.235029Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678976308566872:2081] 1767808333991217 != 1767808333991220 2026-01-07T17:52:14.280379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:14.280411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:14.280424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:14.280536Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:14.416022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:14.600564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:14.645511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.772045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.918573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:14.981415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:15.089540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:16.844754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989193470630:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.844892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.845215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678989193470640:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:16.845271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.126344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.157979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.186109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.212466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.242494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.274240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.306966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.357765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.427741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993488438806:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.427806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.427858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993488438811:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.428014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678993488438813:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.428059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:17.431103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:17.440715Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678993488438815:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:17.547445Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678993488438866:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:18.869007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:18.993075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678976308566904:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:18.993142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... 97Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13736, MsgBus: 22118 2026-01-07T17:52:26.083953Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592679035838659106:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:26.083996Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:26.094954Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:26.171266Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:26.208651Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:26.208738Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:26.215416Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:26.259141Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:26.259164Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:26.259171Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:26.259253Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:26.338563Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:26.662299Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:26.684752Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.740142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.931195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.993950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:27.129934Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:29.368314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679048723562830:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.368398Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.368610Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679048723562839:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.368662Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.431198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.461059Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.488802Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.517283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.544868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.575778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.620331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.658639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.718948Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679048723563707:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.719047Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.719076Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679048723563712:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.719193Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679048723563714:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.719241Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:29.721863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:29.729970Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679048723563715:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:29.821875Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679048723563767:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:31.084626Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679035838659106:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:31.084722Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:31.188115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TNodeBrokerTest::NodesMigration1001Nodes >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> ObjectStorageListingTest::FilterListing [GOOD] >> TNodeBrokerTest::NodesMigrationReuseID >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveNew >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes >> TNodeBrokerTest::SyncNodes [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:52:31.655625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:52:31.788707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:52:31.812041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:52:31.812796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:52:31.812893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:52:32.299515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:32.299648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:32.384075Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808348727144 != 1767808348727148 2026-01-07T17:52:32.411947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:32.459614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:32.584848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:52:32.907863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:32.920896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:33.042315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.108543Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:52:33.108826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:52:33.159881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:52:33.160057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:52:33.161916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:52:33.162020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:52:33.162082Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:52:33.162514Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:52:33.162676Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:52:33.162773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:52:33.173676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:52:33.208832Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:52:33.209124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:52:33.209276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:52:33.209320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:52:33.209359Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:52:33.209404Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.209987Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:52:33.210108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:52:33.210216Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.210277Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:52:33.210358Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:52:33.210423Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.210554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:52:33.211022Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:52:33.211376Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:52:33.211492Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:52:33.213381Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.224101Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:52:33.224209Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:52:33.342005Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:52:33.346493Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:52:33.346566Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.347036Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.347093Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:52:33.347153Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:52:33.347445Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:52:33.347608Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:52:33.347783Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.347853Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:52:33.357634Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:52:33.358179Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:52:33.360610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:52:33.360678Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.361710Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:52:33.361782Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.362890Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.362933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:52:33.362982Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:52:33.363046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:52:33.363094Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:52:33.363184Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.376520Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.378371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:52:33.378533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:52:33.378590Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:52:33.409124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.409252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.409612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.411510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.411703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.419807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:33.426994Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.544265Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.549469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:52:33.627996Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:34.485773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2873], serverId# [1:1054:2874], sessionId# [0:0:0] 2026-01-07T17:52:34.489226Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:52:34.489483Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:52:34.500356Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:34.511454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-07T17:52:34.511680Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:52:34.511874Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2026-01-07T17:52:34.512094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:52:31.555247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:52:31.715210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:52:31.783426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:52:31.784183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:52:31.784249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:52:32.299449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:32.299572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:32.383895Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808349310618 != 1767808349310622 2026-01-07T17:52:32.411883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:32.459532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:32.597437Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:52:32.908201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:32.921223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:33.042931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.100244Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T17:52:33.100519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T17:52:33.143241Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T17:52:33.143372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T17:52:33.146508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T17:52:33.146584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T17:52:33.146636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T17:52:33.150968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T17:52:33.151128Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T17:52:33.151229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T17:52:33.161961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T17:52:33.187358Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T17:52:33.188655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T17:52:33.188822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T17:52:33.188869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:52:33.188909Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T17:52:33.188984Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.191077Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T17:52:33.191178Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T17:52:33.191246Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.191301Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:52:33.191395Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T17:52:33.191433Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.191503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T17:52:33.191860Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T17:52:33.192204Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T17:52:33.192967Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T17:52:33.195331Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.206028Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T17:52:33.206138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T17:52:33.325859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-07T17:52:33.338431Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T17:52:33.338550Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.339134Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.339192Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T17:52:33.339280Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T17:52:33.339615Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T17:52:33.339814Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T17:52:33.340030Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T17:52:33.340160Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T17:52:33.345507Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T17:52:33.347453Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T17:52:33.349964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T17:52:33.350037Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.351163Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T17:52:33.351250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.352403Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T17:52:33.352452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T17:52:33.352497Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T17:52:33.352566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T17:52:33.352619Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T17:52:33.352698Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:33.358372Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.360191Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T17:52:33.360348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T17:52:33.360410Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T17:52:33.409046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.409160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.409531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.411444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.411624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.419813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:33.426362Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.540753Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T17:52:33.543404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:52:33.619417Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:34.485733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2873], serverId# [1:1054:2874], sessionId# [0:0:0] 2026-01-07T17:52:34.489192Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-07T17:52:34.489397Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-07T17:52:34.500216Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T17:52:34.511124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-07T17:52:34.511298Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:52:34.511463Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2026-01-07T17:52:34.511655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-07T17:52:34.513072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1067:2886], serverId# [1:1068:2887], sessionId# [0:0:0] 2026-01-07T17:52:34.513225Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-07T17:52:34.513366Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2026-01-07T17:52:34.513508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1067:2886], serverId# [1:1068:2887], sessionId# [0:0:0] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2026-01-07T17:52:33.572302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:52:33.572366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] Test command err: 2026-01-07T17:52:33.569539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.569599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] Test command err: 2026-01-07T17:52:33.568342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.568643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2026-01-07T17:52:33.568339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.568660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2026-01-07T17:52:33.567518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.568608Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2026-01-07T17:52:33.567914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.568639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:33.895043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:52:33.930325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] Test command err: 2026-01-07T17:52:33.569098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.569157Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] Test command err: 2026-01-07T17:52:33.570191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.570254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2026-01-07T17:51:20.591573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:51:20.610833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:51:20.611029Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-07T17:51:20.616546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:20.616719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:20.616906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:20.617018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:20.617078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:20.617138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:20.617210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:20.617283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:20.617360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:20.617426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:20.617496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:20.617568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:20.617646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:20.635995Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-07T17:51:20.636477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:51:20.636520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:51:20.636680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:20.636814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:51:20.636897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:51:20.636948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-07T17:51:20.637039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-07T17:51:20.637088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:51:20.637123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:51:20.637144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-07T17:51:20.637257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-07T17:51:20.637301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:51:20.637326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:51:20.637353Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-07T17:51:20.637411Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-07T17:51:20.637455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:51:20.637490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:51:20.637508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-07T17:51:20.637538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-07T17:51:20.637562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-07T17:51:20.637596Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-07T17:51:20.637631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-07T17:51:20.637657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-07T17:51:20.637675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-07T17:51:20.637811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-07T17:51:20.637883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-07T17:51:20.637905Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-07T17:51:20.637990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-07T17:51:20.638023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-07T17:51:20.638041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-07T17:51:20.638076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-07T17:51:20.638101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-07T17:51:20.638126Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-07T17:51:20.638160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-07T17:51:20.638182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-07T17:51:20.638204Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-07T17:51:20.638352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-07T17:51:20.638389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mn_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.315662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.315844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.316031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.316270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.316448Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.316652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.316854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.317064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.317277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.317469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.317680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.317883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.318068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-07T17:52:32.318264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5208,5208;s_splitted=5312,5296;r_splitted=854,854; 2026-01-07T17:52:32.321276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=5;size=320144;limit=10240;r_count=80000;fline=column_info.h:139;sizes=10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005;s_splitted=10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10208;r_splitted=2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2516; 2026-01-07T17:52:32.362798Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6817016;count=1255;actions=__DEFAULT,;waiting=1;; 2026-01-07T17:52:33.235547Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6265200;event=data_write_finished;writing_id=a3d0f87c-ebf111f0-8ef79db6-395ec33e; 2026-01-07T17:52:33.237548Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=86;data_size=65;sum=4128;count=95; 2026-01-07T17:52:33.237631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=182;data_size=177;sum=8736;count=96;size_of_meta=112; 2026-01-07T17:52:33.237706Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=262;data_size=257;sum=12576;count=48;size_of_portion=192; 2026-01-07T17:52:33.253847Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-07T17:52:33.266166Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-07T17:52:34.223695Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-07T17:52:34.225322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=110;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-07T17:52:34.318074Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767808285898 at tablet 9437184, mediator 0 2026-01-07T17:52:34.318159Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] execute at tablet 9437184 2026-01-07T17:52:34.319127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=128;fline=abstract.h:88;progress_tx_id=128;lock_id=1;broken=0; 2026-01-07T17:52:34.339232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] complete at tablet 9437184 2026-01-07T17:52:34.339348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=128;lock_id=1;broken=0; 2026-01-07T17:52:34.339504Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-07T17:52:34.339537Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=26; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 |95.2%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 24404, MsgBus: 24789 2026-01-07T17:52:16.511572Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678991990612429:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:16.511624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:16.692567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:16.741273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:16.741406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:16.754057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:16.772427Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678991990612388:2081] 1767808336510362 != 1767808336510365 2026-01-07T17:52:16.773394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:16.821031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:16.821059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:16.821073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:16.821213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:16.990397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:17.174817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:17.222740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.359576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.473650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:17.517287Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:17.537005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.305133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679004875516148:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.305247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.305460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679004875516158:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.305520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.556157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.584217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.611098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.634134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.656833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.683120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.726895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.768501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.827007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679004875517027:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.827076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.827088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679004875517032:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.827623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679004875517034:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.827729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:19.831467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:19.843907Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679004875517035:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:19.937672Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679004875517087:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:21.419234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.511899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678991990612429:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:21.511970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... d: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.716716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.777843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.959384Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:31.919140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679056878335283:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.919254Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.919594Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679056878335293:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.919659Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.955171Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.984620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.014017Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.042727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.069223Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.100011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.130414Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.202777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.263465Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061173303458:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.263545Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.263590Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061173303463:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.263714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061173303465:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.263755Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.266798Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:32.275494Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679061173303467:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:32.371134Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679061173303518:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:33.890769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.954022Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679043993431573:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:33.955071Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:34.402481Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2026-01-07T17:52:34.402635Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-07T17:52:34.402762Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-07T17:52:34.402956Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7592679069763238657:2531], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7592679065468271106:2531]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7592679069763238657:2531].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-07T17:52:34.403492Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592679069763238624:2531], SessionActorId: [3:7592679065468271106:2531], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[3:7592679065468271106:2531]. 2026-01-07T17:52:34.403704Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MWZiYmEwZmYtZWQ1MDZiZjItYmFiYmU2MTItYzJmMDg4NGU=, ActorId: [3:7592679065468271106:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsdhzs2cb96kj3zp7axavs, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7592679069763238652:2531] from: [3:7592679069763238624:2531] trace_id# 2026-01-07T17:52:34.403810Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592679069763238652:2531] TxId: 281474976710677. Ctx: { TraceId: 01kecsdhzs2cb96kj3zp7axavs, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MWZiYmEwZmYtZWQ1MDZiZjItYmFiYmU2MTItYzJmMDg4NGU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-07T17:52:34.404097Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MWZiYmEwZmYtZWQ1MDZiZjItYmFiYmU2MTItYzJmMDg4NGU=, ActorId: [3:7592679065468271106:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsdhzs2cb96kj3zp7axavs, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] Test command err: 2026-01-07T17:52:33.568038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.568647Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> TNodeBrokerTest::NodesMigration2000Nodes >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesMigrationExpiredChanged >> TNodeBrokerTest::UpdateNodesLog [GOOD] >> TNodeBrokerTest::SubscribeToNodes >> KqpFail::OnCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] Test command err: 2026-01-07T17:52:35.113771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:35.113836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::ExtendLeaseBumpVersion >> TNodeBrokerTest::NodesMigration999Nodes >> TNodeBrokerTest::ExtendLeaseRestartRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] Test command err: 2026-01-07T17:52:33.570181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.570249Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:35.602745Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] Test command err: 2026-01-07T17:52:33.570071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.570145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> TNodeBrokerTest::NodesAlreadyMigrated >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-07T17:52:36.306208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:36.306292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-07T17:52:37.038101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:37.038187Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] Test command err: 2026-01-07T17:52:33.598137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.598217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2026-01-07T17:52:35.964513Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1024] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13382, MsgBus: 17590 2026-01-07T17:52:18.843815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679000548304665:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:18.843913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:19.019759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:19.025799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:19.025891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:19.060702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:19.114005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679000548304633:2081] 1767808338842430 != 1767808338842433 2026-01-07T17:52:19.120773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:19.162892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:19.162926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:19.162940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:19.163087Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:19.188213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:19.498000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:19.556844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.665722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.783714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.837075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:19.849992Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:21.559711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679013433208392:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:21.559913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:21.560315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679013433208402:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:21.560395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:21.914445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.943289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.972113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.003841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.040411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.072706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.111252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.162294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:22.226206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679017728176572:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.226305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.226384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679017728176577:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.226574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679017728176579:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.226623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:22.230021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:22.239861Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679017728176581:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:22.306302Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679017728176632:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:23.704601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.844060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679000548304665:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:23.844130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:31.026234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:31.034720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.090989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.233627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.295850Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.517077Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:32.991501Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061398115117:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.991602Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.992096Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061398115127:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.992138Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.992325Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679061398115129:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:32.992354Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.046754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.079495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.113121Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.147295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.178404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.212237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.242857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.288779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.387140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679065693083292:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.387225Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.387322Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679065693083297:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.387469Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679065693083299:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.387525Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.391641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:33.405229Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679065693083301:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:33.475936Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679065693083352:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:35.282510Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.509084Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679052808178696:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:35.509202Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:35.782449Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7592679074283018339:2549], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsdk42cvmb7pk0s6h16ggz. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzE5Nzg4YmItZWMwMzYwNmMtMWVhMWRlMzQtNjBjYzUxMzM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-07T17:52:35.782705Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592679074283018340:2550], TxId: 281474976715677, task: 2. Ctx: { CheckpointId : . TraceId : 01kecsdk42cvmb7pk0s6h16ggz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzE5Nzg4YmItZWMwMzYwNmMtMWVhMWRlMzQtNjBjYzUxMzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7592679074283018336:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T17:52:35.783023Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzE5Nzg4YmItZWMwMzYwNmMtMWVhMWRlMzQtNjBjYzUxMzM=, ActorId: [3:7592679074283018206:2521], ActorState: ExecuteState, LegacyTraceId: 01kecsdk42cvmb7pk0s6h16ggz, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# >> TNodeBrokerTest::SingleDomainModeBannedIds >> TNodeBrokerTest::ShiftIdRangeRemoveActive |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] Test command err: 2026-01-07T17:52:35.583579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:35.583643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:36.637412Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpRboYql::TPCH_YQL [GOOD] |95.3%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::BasicFunctionality >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] Test command err: 2026-01-07T17:52:36.231425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:36.231494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] Test command err: 2026-01-07T17:52:35.409017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:35.409074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpFail::OnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 18677, MsgBus: 5059 2026-01-07T17:52:09.346849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678960884247300:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:09.347593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:09.554649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:09.562549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:09.562660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:09.621475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:09.621745Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678960884247273:2081] 1767808329344176 != 1767808329344179 2026-01-07T17:52:09.623184Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:09.708241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:09.708293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:09.708393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:09.708489Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:09.825335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:10.280681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:10.296896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:10.353762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.354624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:10.486845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.621131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.701106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:11.885419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969474183735:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.885517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.885754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678969474183744:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:11.885788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.118911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.141283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.162738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.188685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.214454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.242465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.273929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.322058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:12.414612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973769151910:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.414694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.414721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973769151915:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.414875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678973769151917:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.414923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:12.417440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:12.425998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678973769151918:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:12.523857Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678973769151970:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:14.347201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592678960884247300:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:14.347277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23821, MsgBus: 15987 ... waiting for SysViewsRoster update finished 2026-01-07T17:52:18.126897Z node 2 :KQP_PROXY WARN: kqp_finalize_script_se ... D_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:52:29.335416Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:52:29.335512Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:52:29.537131Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:29.537274Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:29.561828Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1767808346513496 != 1767808346513499 2026-01-07T17:52:29.566990Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:29.612724Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:29.712834Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:52:30.078970Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:30.079076Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:30.079121Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:30.079543Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:30.092374Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:30.449615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:30.471763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:30.691166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:30.996815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.257732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.746801Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1901:3507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.747012Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.747588Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1974:3526], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.747657Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:31.770088Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.979811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.216946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.520759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.770411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.035527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.305669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.604908Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.966430Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2786:4167], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.966593Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.967125Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2790:4171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.967219Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.967305Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.974681Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:34.148632Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2795:4176], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:34.195998Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2855:4217] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:36.893741Z node 3 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-01-07T17:52:36.894627Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:3169:4425] TxId: 281474976715674. Ctx: { TraceId: 01kecsdkhqe4ex9qvjqrywwg0c, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDVmZTdjYmItZmFlMTNjNjAtMzQ2MjIyYjgtNzgyOGFkNTQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-07T17:52:36.895064Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDVmZTdjYmItZmFlMTNjNjAtMzQ2MjIyYjgtNzgyOGFkNTQ=, ActorId: [3:3114:4425], ActorState: ExecuteState, LegacyTraceId: 01kecsdkhqe4ex9qvjqrywwg0c, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 26731, MsgBus: 24626 2026-01-07T17:52:20.276015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679006962263907:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:20.276095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:20.552737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:20.647724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:20.647829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:20.649044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:20.662060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:20.719197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:20.780084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:20.780105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:20.780111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:20.780209Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:21.148242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:21.203701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.284186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:21.351192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.494093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:21.560020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.136402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679019847167643:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.136540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.136914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679019847167653:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.136976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.463221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.491628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.521452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.550812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.578888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.616698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.648622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.689981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:23.766689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679019847168524:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.766806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.766867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679019847168529:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.766927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679019847168531:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.766960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:23.770419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:23.780313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679019847168533:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:52:23.854478Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679019847168584:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:25.118372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:25.275997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679006962263907:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:25.276073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4577, MsgBus: 4003 2026-01-07T17:52:26.446227Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivere ... hard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.681380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.743319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:32.956537Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:34.860048Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679068695415653:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.860144Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.860476Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679068695415662:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.860557Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.935094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:34.968405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.000164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.031554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.060285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.085695Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.114113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.156503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:35.218746Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679072990383828:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:35.218828Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679072990383833:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:35.218861Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:35.219055Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679072990383836:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:35.219103Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:35.221800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:35.230670Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679072990383835:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:52:35.285542Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679072990383888:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:36.796962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:36.952775Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679055810511930:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:36.953443Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:37.063471Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2026-01-07T17:52:37.063639Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:52:37.063727Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:52:37.063885Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7592679081580318982:2529], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7592679077285351474:2529]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7592679081580318982:2529].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:52:37.063956Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7592679081580318974:2529], SessionActorId: [3:7592679077285351474:2529], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7592679077285351474:2529]. 2026-01-07T17:52:37.064224Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Zjc1YzU4ZDYtZGNlOGNmMWEtZmY3NDU5MmUtOWYzNDI2NDU=, ActorId: [3:7592679077285351474:2529], ActorState: ExecuteState, LegacyTraceId: 01kecsdmk16zzyvcxbwpccx9sa, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7592679081580318975:2529] from: [3:7592679081580318974:2529] trace_id# 2026-01-07T17:52:37.064310Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7592679081580318975:2529] TxId: 281474976715675. Ctx: { TraceId: 01kecsdmk16zzyvcxbwpccx9sa, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Zjc1YzU4ZDYtZGNlOGNmMWEtZmY3NDU5MmUtOWYzNDI2NDU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:52:37.064483Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Zjc1YzU4ZDYtZGNlOGNmMWEtZmY3NDU5MmUtOWYzNDI2NDU=, ActorId: [3:7592679077285351474:2529], ActorState: ExecuteState, LegacyTraceId: 01kecsdmk16zzyvcxbwpccx9sa, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestImmediateEffects`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::NodesV2BackMigration |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TTenantPoolTests::TestStateStatic >> KqpRboPg::Bench_10Joins [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TNodeBrokerTest::RegistrationPipeliningNodeName Test command err: 2026-01-07T17:52:38.457146Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:52:38.457692Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:52:38.458340Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1878325212447655311 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T17:52:38.464486Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T17:52:38.465008Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T17:52:38.465275Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0013cc/r3tmp/tmp5EFgJt/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11501230742750233966 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-07T17:52:38.312179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.312239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:38.863721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.863769Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2026-01-07T17:52:35.157164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:35.157236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: 2026-01-07T17:50:20.774426Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:50:20.843616Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:50:20.843701Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:50:20.843766Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:50:20.843826Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:50:20.860654Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:183:2195], now have 1 active actors on pipe 2026-01-07T17:50:20.860754Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:50:20.885519Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2026-01-07T17:50:20.885745Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:50:20.887981Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2026-01-07T17:50:20.888121Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:50:20.888195Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:50:20.888233Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:50:20.888271Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:50:20.889165Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:50:20.889536Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:50:20.891822Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:50:20.891882Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-07T17:50:20.891915Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:50:20.891972Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:50:20.892055Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:50:20.894387Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:50:20.894440Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:50:20.894483Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.894530Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.894564Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:50:20.894624Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.894667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.894763Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:50:20.894808Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:50:20.894846Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.894874Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2026-01-07T17:50:20.894913Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2026-01-07T17:50:20.894946Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:50:20.894977Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:50:20.895021Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:50:20.895308Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:50:20.895367Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:50:20.895451Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-07T17:50:20.895683Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:50:20.895903Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:50:20.897765Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:50:20.897816Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-07T17:50:20.897854Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:50:20.897904Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:50:20.897962Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:50:20.899284Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-07T17:50:20.899314Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:50:20.899334Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.899376Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.899407Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:50:20.899429Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:50:20.899457Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.899502Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-07T17:50:20.899525Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:50:20.899563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:50:20.899582Z node 1 :PERSQUEUE DEBUG: p ... 
request needed for owner 2026-01-07T17:52:35.297948Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b7ab2011-fcaf71d0-70b22c03-e24a1eb3_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:35.468401Z node 74 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:35.468868Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a2123133-e930be4-e8e8f1ac-2ffab7c4_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:36.027243Z node 75 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 75 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:112:2057] recipient: [75:105:2138] 2026-01-07T17:52:36.071171Z node 75 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:52:36.071222Z node 75 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:52:36.071257Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:52:36.071294Z node 75 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] Leader for TabletID 72057594037927938 is [75:157:2176] sender: [75:158:2057] recipient: [75:151:2172] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:183:2057] recipient: [75:14:2061] 2026-01-07T17:52:36.093238Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:52:36.094379Z node 75 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 73 actor [75:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 73 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 73 } Consumers { Name: "user" Generation: 73 Important: true } 2026-01-07T17:52:36.095830Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [75:189:2142] 2026-01-07T17:52:36.098669Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [75:189:2142] 2026-01-07T17:52:36.100709Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [75:190:2142] 2026-01-07T17:52:36.102877Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [75:190:2142] 2026-01-07T17:52:36.104659Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [75:191:2142] 2026-01-07T17:52:36.106828Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [75:191:2142] 2026-01-07T17:52:36.260795Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:36.261400Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|399c1307-b46613aa-95e58904-b42a46d9_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:36.612397Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:36.612943Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e4968b13-c02920ef-7f752ca0-8a83681d_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:36.890911Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:36.891327Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8d48e72b-d8ca25c-6fa84c96-31fcf30_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2026-01-07T17:52:37.192226Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:37.192655Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ac67a545-cc04ab8e-6afb171c-cf50e236_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:37.362857Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:37.363312Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9a88ff18-bb0fb289-9d2c181a-bf6dd597_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:37.901898Z node 76 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 76 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:107:2057] recipient: [76:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:107:2057] recipient: [76:105:2138] Leader for TabletID 72057594037927937 is [76:111:2142] sender: [76:112:2057] recipient: [76:105:2138] 2026-01-07T17:52:37.961667Z node 76 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:52:37.961733Z node 76 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:52:37.961781Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2026-01-07T17:52:37.961836Z node 76 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:153:2057] recipient: [76:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:153:2057] recipient: [76:151:2172] Leader for TabletID 72057594037927938 is [76:157:2176] sender: [76:158:2057] recipient: [76:151:2172] Leader for TabletID 72057594037927937 is [76:111:2142] sender: [76:181:2057] recipient: [76:14:2061] 2026-01-07T17:52:37.982015Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:52:37.983211Z node 76 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 74 actor [76:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 74 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 74 } Consumers { Name: "user" Generation: 74 Important: true } 2026-01-07T17:52:37.984555Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [76:187:2142] 2026-01-07T17:52:37.987428Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [76:187:2142] 2026-01-07T17:52:37.989553Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [76:188:2142] 2026-01-07T17:52:37.991849Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [76:188:2142] 2026-01-07T17:52:37.993661Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [76:189:2142] 2026-01-07T17:52:37.995901Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [76:189:2142] 2026-01-07T17:52:38.149352Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:38.149982Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f0b99530-86038cf6-e5740ad4-f74c09ed_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:38.447680Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:38.448156Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ec3721a6-ede4cbe7-c920be90-fbe3397e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:38.712286Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: 
Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:38.712710Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8285c204-eb1f4d3f-e4f88c93-3faa21c6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2026-01-07T17:52:38.978310Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:38.978762Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fc0dffc0-2f8349ae-abd74e23-a99a02c9_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:52:39.134375Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:52:39.134840Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|302cdfa5-2d6d9253-6cf9a721-48ca5a26_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2026-01-07T17:52:38.375619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.375674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:38.620826Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2026-01-07T17:52:38.646835Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-07T17:52:38.659923Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2026-01-07T17:52:39.480058Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-07T17:52:39.510012Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true >> TTenantPoolTests::TestSensorsConfigForStaticSlot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] Test command err: 2026-01-07T17:52:35.778025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:35.778093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTenantPoolTests::TestStateStatic [GOOD] >> TNodeBrokerTest::ExtendLeaseBumpVersion [GOOD] >> TNodeBrokerTest::EpochCacheUpdate >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::TPCH_YQL [GOOD] Test command err: Trying to start YDB, gRPC: 32159, MsgBus: 1448 2026-01-07T17:51:52.898300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678885741591175:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:52.898375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:53.066459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:53.083157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:53.083259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.107574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.128265Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678885741591136:2081] 1767808312897638 != 1767808312897641 2026-01-07T17:51:53.128912Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:53.166134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:53.166155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:53.166161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:53.166256Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:53.337887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:53.413598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:53.904246Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:55.056845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678898626493914:2326], DatabaseId: 
/Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:55.056846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678898626493906:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:55.056969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:55.057327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678898626493921:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:55.057405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:55.060660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:51:55.070600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678898626493920:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:51:55.172759Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678898626493973:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6651, MsgBus: 2886 2026-01-07T17:51:56.018055Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678906686992092:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:56.018098Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:56.027183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:56.081867Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:56.123267Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:56.123328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:56.124606Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:56.145149Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:56.145165Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:56.145170Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:56.145224Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:56.256095Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:56.427696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:57.021722Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:57.874004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678910981960219:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.874091Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.874277Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592678910981960229:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.874312Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.971952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:51:58.101039Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[2:7592678915276927775:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:58.101039Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:58.101252Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:58.101443Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:58.101535Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:58.101599Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:58.101667Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:58.101730Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[2:7592678915276927778:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:58.101734Z node 2 :TX_COLUMNSHARD WARN: log.cp ... 
X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.514321Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.514347Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.515105Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.515152Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.515168Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525241Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525241Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525306Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525325Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.525327Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.537333Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.537415Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 
2026-01-07T17:52:27.537442Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.537828Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.537891Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.537916Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549410Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549447Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549488Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549499Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.549521Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561593Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561669Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561672Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561698Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561723Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.561742Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574033Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574056Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574110Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574112Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574137Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.574137Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-07T17:52:27.627508Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679037444997949:3014], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:27.627610Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:27.627831Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679037444997954:3017], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:27.627887Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679037444997955:3018], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:27.627949Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:27.632407Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:27.646793Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679037444997958:3019], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715666 completed, doublechecking } 2026-01-07T17:52:27.725534Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679037444998011:5399] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:33.365991Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:52:33.366018Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TNodeBrokerTest::NodesSubscriberDisconnect >> TNodeBrokerTest::NodesMigrationReuseExpiredID >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-07T17:52:39.860029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:39.860082Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR 2026-01-07T17:52:40.447112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.447177Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-07T17:52:33.571521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.571604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 15174, MsgBus: 17263 2026-01-07T17:51:54.971552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678894838305964:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:51:54.971638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:51:55.148835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:55.148971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:55.171939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:55.174674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:51:55.201713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:55.204023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592678894838305923:2081] 1767808314970201 != 1767808314970204 2026-01-07T17:51:55.248959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:55.248987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:55.249032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:55.249117Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:55.414423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:51:55.561337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:55.978376Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:56.997224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678903428241400:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.997356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.997643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678903428241410:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:56.997710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:51:57.213688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:51:57.567571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:57.567788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:57.567956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:57.568048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:57.568140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:57.568201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:57.568273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:57.568339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:57.568411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:57.568505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:57.568567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-07T17:51:57.568637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:57.568727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7592678907723209245:2328];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:57.579477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:51:57.579616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:51:57.579845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:51:57.579981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:51:57.580110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:51:57.580245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:51:57.580341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:51:57.580471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:51:57.580644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:51:57.580770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:51:57.580897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:51:57.581029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:51:57.581151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592678907723209251:2334];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:51:57.597702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7592678907723209256:2339];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event ... WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:27.313529Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:27.587071Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 10074, MsgBus: 64164 2026-01-07T17:52:30.135374Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679052613316789:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:30.135456Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:30.192129Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:30.281649Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:30.283865Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679052613316748:2081] 1767808350134498 != 1767808350134501 2026-01-07T17:52:30.349480Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:30.349567Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:30.354946Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:30.383831Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:30.383860Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:30.383867Z node 5 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:30.383960Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:30.478820Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:30.849156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:31.182348Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:31.581637Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679032734007579:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:31.581734Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:52:33.530611Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679065498219523:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.530719Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.531023Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679065498219532:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.531080Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:33.619159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.655899Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.690704Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.732117Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.770382Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.808116Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.846440Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.889534Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.931206Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:33.967913Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:34.021538Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679069793187554:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.021661Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.021804Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679069793187559:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.022056Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679069793187561:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.022135Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:34.026551Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:34.041757Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679069793187562:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2026-01-07T17:52:34.131378Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679069793187614:3011] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 51], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:35.135523Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679052613316789:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:35.135624Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] Test command err: 2026-01-07T17:52:37.853421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:37.853497Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2026-01-07T17:52:38.413003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.413069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2026-01-07T17:52:38.635720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.635785Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:39.597155Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] >> TEnumerationTest::TestPublish [GOOD] >> TLocalTests::TestAddTenant >> TNodeBrokerTest::TestRandomActions >> TNodeBrokerTest::NodeNameExpiration >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] >> TNodeBrokerTest::RegistrationPipelining >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-07T17:52:40.180485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.180537Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... 
waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2026-01-07T17:52:40.962794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.962852Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] Test command err: 2026-01-07T17:52:37.984095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:37.984158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:39.968988Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2026-01-07T17:52:40.553030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.553092Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:40.741932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted >> TNodeBrokerTest::NodesMigrationNewActiveNode >> TLocalTests::TestAddTenant [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2026-01-07T17:52:39.887362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:39.887438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] |95.3%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] Test command err: 2026-01-07T17:52:38.503772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.503841Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] Test command err: 2026-01-07T17:52:38.330682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.330781Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2026-01-07T17:52:40.151794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.151848Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:40.448749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:52:40.467196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2026-01-07T17:52:42.288805Z node 1 :LOCAL ERROR: local.cpp:1299: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2026-01-07T17:52:42.289056Z node 1 :LOCAL ERROR: local.cpp:1549: Unknown domain dc-3 |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] Test command err: 2026-01-07T17:52:37.350695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:37.350760Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2026-01-07T17:52:38.389910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.389972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... captured cache request ... waiting for response ... 
waiting for epoch update |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::NodesMigrationRemovedChanged >> TNodeBrokerTest::UpdateEpochPipelining >> TSlotIndexesPoolTest::Expansion [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-07T17:52:42.137375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.137458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-07T17:52:42.693795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.693859Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved >> TNodeBrokerTest::FixedNodeId >> TNodeBrokerTest::NodesV2BackMigration [GOOD] >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] >> TNodeBrokerTest::EpochCacheUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2026-01-07T17:52:39.268602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:39.268669Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:40.562902Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001, expected = DC=1/M=2/R=3/U=4/, got = DC=1/M=2/R=3/U=5/ 2026-01-07T17:52:40.576389Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-07T17:52:40.576833Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:40.577226Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TNodeBrokerTest::RegistrationPipelining [GOOD] >> TDynamicNameserverTest::TestCacheUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] Test command err: 2026-01-07T17:52:41.405666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.405721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for updates are sent ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... waiting for updates are sent (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] Test command err: 2026-01-07T17:52:38.325777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:38.325854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:41.044287Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.044362Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::TestListNodesEpochDeltas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration [GOOD] Test command err: 2026-01-07T17:52:40.030587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:40.030650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... 
blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2026-01-07T17:52:41.460536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.460599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless >> TNodeBrokerTest::Test999NodesSubscribers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2026-01-07T17:52:41.484255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.484323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TPQTest::IncompleteProxyResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2026-01-07T17:52:42.315270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.315341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] Test command err: 2026-01-07T17:52:41.543906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.543966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] Test command err: 2026-01-07T17:52:42.836568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.836628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TPQTest::IncompleteProxyResponse [GOOD] >> TPQTest::SmallMsgCompactificationWithRebootsTest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> TNodeBrokerTest::TestListNodes [GOOD] >> TNodeBrokerTest::NodesMigrationExpireActive >> TNodeBrokerTest::FixedNodeId [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2026-01-07T17:52:43.801087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.801147Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:43.877269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2026-01-07T17:52:44.980546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.980624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:45.085370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-07T17:52:45.238673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-07T17:52:41.724742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:41.724819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:42.007806Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-07T17:52:42.019793Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-07T17:52:45.312042Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.312094Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2026-01-07T17:52:39.962757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:39.962816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2026-01-07T17:52:42.154208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.154290Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:42.276406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2026-01-07T17:52:44.257194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.257256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-07T17:52:44.530013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.530073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:45.878618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.878673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2026-01-07T17:52:43.558202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.558259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2026-01-07T17:52:43.920465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.920543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] Test command err: 2026-01-07T17:52:43.521977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.522063Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:44.967587Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-07T17:52:45.729489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.729551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2026-01-07T17:52:46.516061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:46.516132Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... 
waiting for cache miss (done) >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2026-01-07T17:52:43.725753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.725815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-07T17:52:42.797914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.798010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false [GOOD] >> GracefulShutdown::TTxGracefulShutdown >> TNodeBrokerTest::ExtendLeasePipelining |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2026-01-07T17:52:43.905494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:43.905558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:45.427215Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] Test command err: 2026-01-07T17:52:45.623090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.623156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2026-01-07T17:52:44.196863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.196956Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2026-01-07T17:52:44.450573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.450643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:46.291596Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1142: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] Test command err: 2026-01-07T17:52:46.171324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:46.171382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2026-01-07T17:52:44.771337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.771410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2026-01-07T17:52:44.911313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:44.911369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:44.988723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-07T17:48:17.186342Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:48:17.244215Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:48:17.248487Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:48:17.248674Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:48:17.248715Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:48:17.248744Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:48:17.248778Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:48:17.248809Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.248846Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:48:17.262625Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:183:2195], now have 1 active actors on pipe 2026-01-07T17:48:17.262718Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:48:17.278893Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:17.281609Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:17.281780Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:48:17.282619Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:48:17.282734Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:48:17.283091Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:48:17.283445Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:48:17.285710Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:48:17.285783Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-07T17:48:17.285824Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:48:17.285869Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:48:17.285936Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:48:17.287355Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:17.288600Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-07T17:48:17.288653Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:17.288690Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.288747Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.288778Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:48:17.288812Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:17.288848Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.288910Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:48:17.288971Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:48:17.289007Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:48:17.289049Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-07T17:48:17.289083Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-07T17:48:17.289113Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:48:17.289144Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:48:17.289184Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:17.289463Z node 1 :PERSQUEUE DEBUG: 
partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:17.289514Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:48:17.289587Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:48:17.289802Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:48:17.289991Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:17.292466Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:48:17.292565Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:48:17.292612Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:48:17.292656Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:17.292699Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:48:17.292733Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:48:17.292786Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:48:17.292836Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:48:17.293175Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe Set pipe for create session: [1:201:2206] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:202:2057] recipient: [1:14:2061] 2026-01-07T17:48:17.294264Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:201:2206], now have 1 active actors on pipe 2026-01-07T17:48:17.294345Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T1 ... 
UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.919080Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-07T17:52:47.919099Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-07T17:52:47.919123Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 27:0 isTruncatedBlob 0 2026-01-07T17:52:47.919212Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 27:0 isTruncatedBlob 0 hasNonZeroParts 0 isMiddlePartOfMessage 0 2026-01-07T17:52:47.919844Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 50:0 2026-01-07T17:52:47.920005Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 14 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 50 partno 0 count 4294967295 size 4294967295 endOffset 67 max time lag 0ms effective offset 50 2026-01-07T17:52:47.920295Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 14 added 2 blobs, size 5223238 count 17 last offset 61, current partition end offset: 67 2026-01-07T17:52:47.920325Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 14. Send blob request. 2026-01-07T17:52:47.920382Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 50 partno 0 count 11 parts_count 0 source 1 size 3379747 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-07T17:52:47.920415Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 61 partno 0 count 6 parts_count 0 source 1 size 1843491 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-07T17:52:47.920452Z node 159 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 14. All 2 blobs are from cache. 2026-01-07T17:52:47.920498Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' 2026-01-07T17:52:47.920532Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 61 partno 0 count 6 parts 0 suffix '0' 2026-01-07T17:52:47.920586Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-07T17:52:47.920687Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.920727Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 52 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.920760Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 54 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.920793Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 56 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.920826Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 58 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.920858Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 60 totakecount 11 count 1 size 307240 from pos 0 cbcount 1 2026-01-07T17:52:47.920904Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 61 totakecount 6 count 2 size 614475 from pos 0 cbcount 2 2026-01-07T17:52:47.921000Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-07T17:52:47.921027Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-07T17:52:47.921057Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 50:0 isTruncatedBlob 0 2026-01-07T17:52:47.921252Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 50 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.921436Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 51 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.921577Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 52 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.921746Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 53 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.921908Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 54 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922065Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: 
Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 55 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922223Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 56 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922377Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 57 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922546Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 58 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922727Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 59 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922886Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 60 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-07T17:52:47.922939Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-07T17:52:47.925097Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000027_00000_0000000023_00000 2026-01-07T17:52:47.925171Z node 159 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-07T17:52:47.925300Z node 159 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-07T17:52:47.925334Z node 159 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:52:47.925413Z node 159 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 50 partNo 0 count 11 size 167 2026-01-07T17:52:47.925438Z node 159 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000027_00000_0000000023_00000(+) to d0000000000_00000000000000000027_00000_0000000023_00000(+) 2026-01-07T17:52:47.928415Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 27 count 23 actorID [159:138:2142] 2026-01-07T17:52:47.928461Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. 
Partition 0 offset 50 count 11 size 3379747 actorID [159:138:2142] is actual 1 2026-01-07T17:52:47.928535Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 50 count 11 size 167 actorID [159:138:2142] 2026-01-07T17:52:47.928654Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 27 partno 0 count 23 parts 0 suffix '0' size 263 2026-01-07T17:52:47.928707Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 3379747 2026-01-07T17:52:47.929870Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 167 2026-01-07T17:52:47.930161Z node 159 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:52:47.930192Z node 159 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-07T17:52:47.930220Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-07T17:52:47.933791Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:301:2288], now have 1 active actors on pipe 2026-01-07T17:52:47.933846Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:52:47.933874Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:52:47.933968Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 61 for user __ydb_compaction_consumer 2026-01-07T17:52:47.934203Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:303:2290], now have 1 active actors on pipe Got start offset = 50 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2026-01-07T17:52:45.044233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.044310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:45.130091Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-07T17:52:45.141965Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-07T17:52:48.135976Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:48.136023Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TNodeBrokerTest::ConfigPipelining >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |95.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| 
[TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2026-01-07T17:52:48.232215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:48.232282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] >> Compression::WriteRAW |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/data_integrity/unittest |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select |95.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2026-01-07T17:52:46.041639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:46.041703Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:50.943742Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:50.943830Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-07T17:52:51.032011Z node 9 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25542, MsgBus: 17831 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 7329, MsgBus: 25422 >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63182, MsgBus: 21217 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 9431, MsgBus: 18990 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12563, MsgBus: 16008 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22644, MsgBus: 61405 >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::StreamScanQuery >> KqpYql::EvaluateExpr1 >> KqpPragma::Auth >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> KqpScripting::LimitOnShard >> KqpYql::NonStrictDml >> KqpYql::UuidPrimaryKeyDisabled >> KqpScripting::ScanQueryInvalid |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::TableUseBeforeCreate >> KqpScripting::SelectNullType >> KqpYql::FlexibleTypes >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpScripting::ScriptValidate >> KqpScripting::ScriptExplainCreatedTable >> KqpYql::BinaryJsonOffsetBound >> KqpScripting::QueryStats >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 4787, MsgBus: 19049 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26690, MsgBus: 16344 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::InsertCV-useSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 8372, MsgBus: 6927 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::TableConcat ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 29369, MsgBus: 65425 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8729, MsgBus: 17392 >> KqpYql::InsertCVList-useSink |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::TableRange >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpYql::UuidPrimaryKey >> KqpPragma::ResetPerQuery >> KqpScripting::UnsafeTimestampCast >> KqpYql::EvaluateExprYsonAndType >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> KqpYql::RefSelect >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 27927, MsgBus: 16126 2026-01-07T17:52:58.386340Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679171101616183:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.386385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.709426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.769355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.769442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.825372Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679171101616038:2081] 1767808378366944 != 1767808378366947 2026-01-07T17:52:58.840737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.850065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.998392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.115697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.115774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.405281Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.874972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.880483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:01.700676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183986518817:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.700801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183986518826:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.396147Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188281486137:2508] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-07T17:53:02.436045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486147:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.436136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.437904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486149:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.437980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.454979Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188281486156:2518] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-07T17:53:02.467301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486164:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.467442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.467883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486166:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.467976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.512897Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188281486173:2527] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2026-01-07T17:53:02.538762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486181:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.538843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.539266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486184:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.539309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.557343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.729085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486271:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.729196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.737099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188281486274:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.737188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::EvaluateExpr2 >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 9019, MsgBus: 3285 |95.6%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::EvaluateIf >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpYql::NonStrictDml [GOOD] >> KqpYql::PgIntPrimaryKey >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpYql::UuidPrimaryKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 26465, MsgBus: 21562 2026-01-07T17:52:58.357579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679171205197446:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.747610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.751831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.751941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.781186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.873974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679171205197414:2081] 1767808378353235 != 1767808378353238 2026-01-07T17:52:58.884746Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:59.026678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.117429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.117453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-01-07T17:52:59.117458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.117520Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.397656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.850845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.855406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:01.915620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184090100190:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.915723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.915922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184090100199:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.915955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.506689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188385067593:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.506791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.507101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188385067598:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.507175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188385067599:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.507306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.515816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.526316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679188385067602:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:53:02.634490Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188385067653:2596] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.357714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679171205197446:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.357816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 17670, MsgBus: 23011 2026-01-07T17:52:58.357917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679169567082732:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.741624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.741723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.759615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.801179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.838215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.843987Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679169567082699:2081] 1767808378353731 != 1767808378353734 2026-01-07T17:52:59.070729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.117044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.374675Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.842882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.920315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.101450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.261092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.342694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.820905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182451986469:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.821052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.821372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182451986479:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.821474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.420739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.456600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.491658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.522048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.596350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.660833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.711543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.788647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186746954656:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.788704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.788706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186746954661:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.788952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186746954663:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.789024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.792178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.802634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679186746954664:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.890909Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679186746954718:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.358209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679169567082732:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.358264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] >> KqpYql::TableNameConflict >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpYql::InsertCVList-useSink [GOOD] >> KqpYql::InsertIgnore >> KqpYql::InsertCV-useSink [GOOD] >> KqpYql::InsertCVList+useSink >> KqpPragma::OrderedColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 10838, MsgBus: 17043 2026-01-07T17:53:00.535121Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679180718181422:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.535208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.750805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.787312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.787418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.862923Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.866664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679180718181383:2081] 1767808380529249 != 1767808380529252 2026-01-07T17:53:00.869664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.976978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.977002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.977015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.977137Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:00.999427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:01.387243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.549865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:03.421982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679193603084156:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.422095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.422337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679193603084165:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.422363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.654684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.768413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679193603084260:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.768477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.768633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679193603084265:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.768917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679193603084267:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.768967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.772118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:03.782584Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679193603084268:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:53:03.913693Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679193603084320:2594] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:04.288651Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679197898051709:2362], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2026-01-07T17:53:04.289098Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWE1NTc3YWMtMWFjNTY5OGEtNzA4Njk1ZDMtNDM2N2I1ZGE=, ActorId: [1:7592679193603084148:2321], ActorState: ExecuteState, LegacyTraceId: 01kecsef8mcyrx8jgazhw35dv4, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Invalid value \"invalid-uuid\" for type Uuid" end_position { row: 3 column: 25 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:05.533053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679180718181422:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.533118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> THeavyPerfTest::TTestLoadEverything [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> KqpYql::ColumnNameConflict >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpYql::EvaluateFor >> KqpScripting::EndOfQueryCommit >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpYql::EvaluateExpr3 >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] >> KqpYql::PgIntPrimaryKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 12484, MsgBus: 23701 2026-01-07T17:52:58.368338Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679169602423784:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.369011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.725030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.728044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.731504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.810072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected 2026-01-07T17:52:58.902100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.905123Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679169602423755:2081] 1767808378363520 != 1767808378363523 2026-01-07T17:52:59.019758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.117535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.117565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.117570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.117631Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.389227Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.894042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.901683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.969108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.124205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.320390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.399271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.875872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182487327519:2399], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.876032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.876380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182487327529:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.876444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.390261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.424556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.451237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.486410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.528601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.588668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.627921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.683663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.763339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186782295701:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.763442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.763625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186782295706:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.763673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186782295707:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.763807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.766898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.775666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679186782295710:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:02.871290Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679186782295763:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.368875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679169602423784:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.368980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.670544Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808384692, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 18489, MsgBus: 4704 2026-01-07T17:53:05.565251Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679200279519870:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.565700Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.609044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.735144Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.736297Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.736358Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.740363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.797416Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.824706Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.824727Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.824733Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.824802Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.232549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.237546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:06.593063Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.564518Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422547:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.564635Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.565967Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422582:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.566076Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.629458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.681821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422681:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.681908Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.682317Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422683:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.682366Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.711350Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422693:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.711406Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.711613Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422699:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.711645Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213164422698:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.711668Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.714868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:08.724603Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679213164422702:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:53:08.782202Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679213164422753:2601] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateIf [GOOD] >> HttpRequest::ProbeBaseStats [GOOD] >> KqpYql::AnsiIn [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamDdlAndDml [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2026-01-07T17:52:06.741572Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1767808326741546 2026-01-07T17:52:06.972281Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678947962505005:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:06.972327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:07.052689Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:52:07.054110Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678952469074155:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:07.054202Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:07.062735Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:52:07.186117Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:07.186588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:07.329016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:07.329134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:07.331351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:07.331452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:07.347408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:07.350816Z node 1 :HIVE WARN: hive_impl.cpp:814: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:52:07.354756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:07.395141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:07.405586Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:07.420784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:07.589875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00365e/r3tmp/yandexOT2PV0.tmp 2026-01-07T17:52:07.589958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00365e/r3tmp/yandexOT2PV0.tmp 2026-01-07T17:52:07.591229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00365e/r3tmp/yandexOT2PV0.tmp 2026-01-07T17:52:07.591343Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:07.829804Z INFO: TTestServer started on Port 21937 GrpcPort 22465 PQClient connected to localhost:22465 2026-01-07T17:52:07.979900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:07.988148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:08.060715Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:09.499534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678960847408226:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.499643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678960847408217:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.499800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.500223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678960847408233:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.500275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.504155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:09.527105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678960847408232:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:52:09.800195Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678960847408324:2989] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:09.809605Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592678961059009085:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:52:09.810708Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=Y2YzMGYwYWMtMTRlZDI0NTktNzhkODQ2ODgtYWE1MmI3Mzg=, ActorId: [2:7592678961059009044:2299], ActorState: ExecuteState, LegacyTraceId: 01kecscsy5be2014e6g3ny9rz9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:52:09.815330Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:52:09.823069Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592678960847408352:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:52:09.825055Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmVjMTBjOWQtNjhhZjI3YWUtNThlNDQxYjctOWU0ZmY4ZDE=, ActorId: [1:7592678960847408215:2328], ActorState: ExecuteState, LegacyTraceId: 01kecscsrs56c8htctj2eaxt7d, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:52:09.825490Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:52:09.904162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:10.02586 ... cer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1767808388] updating configuration. Deleted partitions []. 
Added partitions [] 2026-01-07T17:53:08.429018Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:53:08.431193Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:53:08.437854Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1767808388479, TxId 281474976715678 2026-01-07T17:53:08.437891Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.437912Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-07T17:53:08.437943Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:53:08.437956Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.437971Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:53:08.437985Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.438215Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767808388479, TxId 281474976715678 2026-01-07T17:53:08.438241Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.438256Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:53:08.438271Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.438298Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T17:53:08.438446Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976715678 2026-01-07T17:53:08.438467Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:53:08.438481Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:53:08.438499Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.438690Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:53:08.440854Z node 10 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037894][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-07T17:53:08.440885Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:53:08.441150Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:53:08.441245Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:53:08.441270Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.441286Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.441301Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.441318Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.441332Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.441357Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:53:08.441706Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1767808388" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1767808388" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1767808388" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-07T17:53:08.441762Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:53:08.441877Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:53:08.444275Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:53:08.453320Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-07T17:53:08.517128Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.517165Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.517178Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.517198Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.517212Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.541194Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.541226Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.541240Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.541259Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.541274Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.617507Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.617541Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.617558Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.617578Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.617595Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.641588Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.641622Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.641638Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.641659Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.641674Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.718184Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.718218Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.718233Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.718256Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.718276Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:53:08.741900Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:53:08.741935Z node 10 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.741950Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:53:08.741971Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:53:08.741985Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T17:53:09.036682Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [9:7592679217683752317:2482] TxId: 281474976715679. Ctx: { TraceId: 01kecsekcyc65p623998mtz9zm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmNhMWRlMTAtNzNlODRiMDYtNzY0ZGQ0MTMtZGI3ZGQ2YTc=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-01-07T17:53:09.036848Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [9:7592679217683752321:2482], TxId: 281474976715679, task: 3. Ctx: { TraceId : 01kecsekcyc65p623998mtz9zm. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=MmNhMWRlMTAtNzNlODRiMDYtNzY0ZGQ0MTMtZGI3ZGQ2YTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [9:7592679217683752317:2482], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |95.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 11003, MsgBus: 10037 2026-01-07T17:52:58.357808Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172983750521:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.677163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.715249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.715371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.765092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:59.008288Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679172983750492:2081] 1767808378353141 != 1767808378353144 2026-01-07T17:52:59.017876Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:59.027876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.115961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116028Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.373323Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.787970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.851861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.040456Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.225254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.310048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.801902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185868654254:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.802038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.802413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185868654264:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.802498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.421076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.457307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.507383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.549204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.650990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.694173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.748311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.824199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190163622437:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.824320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.824528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190163622442:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.824587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.824622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190163622443:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.828612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.840661Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679190163622446:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.912463Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679190163622497:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.358115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172983750521:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.358185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15058, MsgBus: 8988 2026-01-07T17:53:05.641226Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679203406130795:2248];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.642038Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.654378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:05.756731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.756812Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.756963Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.758285Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679203406130585:2081] 1767808385613765 != 1767808385613768 2026-01-07T17:53:05.758331Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.769360Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.894094Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.894114Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.894120Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.894194Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.058880Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.304576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.611552Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.954682Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216291033353:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.954763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.955271Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216291033364:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.955312Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.975652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.019218Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679220586000753:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.019311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.019592Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679220586000758:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.019646Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679220586000759:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.019737Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.023731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.032990Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679220586000762:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:53:09.121566Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679220586000813:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpYql::TableNameConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateIf [GOOD] Test command err: Trying to start YDB, gRPC: 10096, MsgBus: 22125 2026-01-07T17:52:58.397170Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679170951116106:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.398035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.693556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.715057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.715158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.718804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.829195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.969813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.118063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.118107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.118116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.118191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.405140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.796266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.801876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
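[Editorial note] The warning sequence visible above recurs in almost every test of this run: the workload service fails to fetch the `default` resource pool (NOT_FOUND), a TPoolCreatorActor creates it and schedules a retry "for error: Transaction ... completed, doublechecking", and a concurrent create then reports "Check failed: path ... error: path exist, request accepts it". The noise is therefore benign warm-up behaviour rather than a test failure. Purely as an illustration of that idempotent create-then-verify pattern (not YDB code), here is a minimal Python sketch; `create_pool` and `pool_exists` are hypothetical callables standing in for the real scheme operations.

    import time

    class AlreadyExists(Exception):
        """Raised when the path already exists; the request is still accepted."""

    def ensure_default_pool(create_pool, pool_exists, retries=5, delay=0.1):
        """Idempotently ensure the 'default' resource pool exists, mirroring the
        NOT_FOUND -> create -> doublecheck sequence seen in the log above."""
        for _ in range(retries):
            if pool_exists("default"):      # fetch succeeded, nothing to do
                return True
            try:
                create_pool("default")      # may race with another session
            except AlreadyExists:
                pass                        # "path exist, request accepts it" -- benign
            time.sleep(delay)               # scheduled retry before doublechecking
        return pool_exists("default")

Under these assumptions the function returns True once any concurrent creator has won the race, which is why the repeated NOT_FOUND / "path exist" pairs in the log do not affect the test verdicts ([GOOD]).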
2026-01-07T17:52:59.863976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.038217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.196346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.285782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.702147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183836019794:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.702279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.702648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183836019803:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.702686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.414996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.445281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.476143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.506308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.545285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.619186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.666776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.745490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188130987973:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.745565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.745698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188130987978:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.745747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188130987979:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.745791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.749029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.759171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679188130987982:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.836457Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188130988033:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.394367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679170951116106:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.394427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65291, MsgBus: 25390 2026-01-07T17:53:05.423136Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679201508286995:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.424080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.457275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.526013Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.527224Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679201508286958:2081] 1767808385409027 != 1767808385409030 2026-01-07T17:53:05.566660Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.566741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.572510Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.628422Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.648234Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.648259Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.648267Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.648333Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.032455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:06.038906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:06.046354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.101914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.273803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.335727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.450211Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.433151Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214393190711:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.433219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.433467Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214393190720:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.433512Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.552482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.592122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.627760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.660608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.694193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.739874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.780827Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.831120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.916502Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214393191587:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.916589Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.916977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214393191592:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.917035Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214393191593:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.917270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.921012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:08.932785Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679214393191596:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:09.012302Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679218688158943:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.414047Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679201508286995:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.414096Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr2 [GOOD] Test command err: Trying to start YDB, gRPC: 13795, MsgBus: 13020 2026-01-07T17:52:58.358794Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172169572388:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.358867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.717810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.752042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.752122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.765148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.842645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.845121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679172169572354:2081] 1767808378353451 != 1767808378353454 2026-01-07T17:52:58.962072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.115749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.115823Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.383159Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.784612Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.853912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.046836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.217693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.291970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.700658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185054476125:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.700757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185054476134:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.416660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.441959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.471924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.502284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.570992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.608467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.665598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.776525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189349444310:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.776604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.776863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189349444315:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.776907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189349444316:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.777052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.781291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.796317Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189349444319:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.889320Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189349444370:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.359039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172169572388:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.359084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63431, MsgBus: 8857 2026-01-07T17:53:05.217997Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679200420996591:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.218074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.228585Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:05.291722Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.293085Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679200420996560:2081] 1767808385217062 != 1767808385217065 2026-01-07T17:53:05.339488Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.339587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.346797Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.347805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.426175Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.426194Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.426200Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.426271Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.783670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:05.790084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:05.807351Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.887128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.026426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.103111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.277319Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.534122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213305900323:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.534241Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.534646Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213305900333:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.534721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.663128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.700598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.735386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.771732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.807592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.844886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.885454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.936762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.044502Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217600868498:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.044624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.044965Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217600868503:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.044970Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217600868504:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.045025Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.053065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.065242Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679217600868507:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:09.156406Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679217600868558:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.218282Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679200420996591:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.218348Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::InsertCVList+useSink [GOOD] >> KqpScripting::Pure [GOOD] >> KqpScripting::JoinIndexLookup [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertIgnore [GOOD] >> KqpYql::BinaryJsonOffsetNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:48.280887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.350704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.356244Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:48.356483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:48.356576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:48.634779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:48.716852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:48.716994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.751371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:48.829541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:49.450651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:49.451175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:49.451215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:49.451242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:49.451660Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:49.514719Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:50.023936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:52.645164Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:52.651351Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:52.654151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:52.679479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:52.679575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:52.717349Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:52.719095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:52.878223Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:52.878323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:52.879479Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.880241Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.880715Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.881385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.881855Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.881972Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.882088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.882185Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.882304Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:52.897087Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.065080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:53.105793Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:53.105883Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:53.161867Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:53.163059Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:53.163242Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:53.163293Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:53.163337Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:53.163393Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:53.163440Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:53.163480Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:53.164166Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:53.175086Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.175173Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.199385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:51:53.200113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:51:53.249547Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2243:2658] 2026-01-07T17:51:53.252118Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:53.264335Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Describe result: PathErrorUnknown 2026-01-07T17:51:53.264382Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Creating table 2026-01-07T17:51:53.264454Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:53.271000Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2692], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:53.274275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:53.280966Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:53.281077Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:53.291145Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:53.473523Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:53.485737Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:53.767544Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:53.910879Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:53.910986Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2252:2665] Owner: [2:2250:2663]. Column diff is empty, finishing 2026-01-07T17:51:54.695565Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... request id = 55, ReplyToActorId = [2:5580:4601], StatRequests.size() = 1 2026-01-07T17:52:52.591076Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:5615:4615]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:52.591483Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-07T17:52:52.591533Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:5615:4615], StatRequests.size() = 1 2026-01-07T17:52:53.336006Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:53.531705Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:53.531788Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:53.531843Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:53.531878Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:54.058493Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:5655:4634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:54.058824Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-07T17:52:54.058857Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:5655:4634], StatRequests.size() = 1 2026-01-07T17:52:55.405418Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:5687:4648]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:55.405758Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-07T17:52:55.405796Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:5687:4648], StatRequests.size() = 1 
2026-01-07T17:52:56.684615Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:52:56.684822Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-07T17:52:56.684978Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-07T17:52:56.749431Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:52:56.749498Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:56.749707Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:52:56.763312Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:57.795122Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:5721:4664]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:57.795368Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-07T17:52:57.795403Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:5721:4664], StatRequests.size() = 1 2026-01-07T17:52:59.532085Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:5764:4690]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:59.532338Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-07T17:52:59.532370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:5764:4690], StatRequests.size() = 1 2026-01-07T17:53:00.690011Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:5797:4704]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:00.690306Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-07T17:53:00.690353Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:5797:4704], StatRequests.size() = 1 2026-01-07T17:53:01.554376Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:02.014163Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:5832:4718]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:02.014503Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-07T17:53:02.014568Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:5832:4718], StatRequests.size() = 1 2026-01-07T17:53:03.226195Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:5867:4736]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:03.226465Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 
2026-01-07T17:53:03.226505Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:5867:4736], StatRequests.size() = 1 2026-01-07T17:53:04.028576Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:04.028809Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 2026-01-07T17:53:04.028914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-07T17:53:04.093146Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:53:04.093232Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:04.093441Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-07T17:53:04.106884Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:04.447966Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:5900:4752]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:04.448234Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-07T17:53:04.448264Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:5900:4752], StatRequests.size() = 1 2026-01-07T17:53:05.572122Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 65 ], ReplyToActorId[ [2:5933:4768]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:05.572416Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 65 ] 2026-01-07T17:53:05.572460Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 65, ReplyToActorId = [2:5933:4768], StatRequests.size() = 1 2026-01-07T17:53:06.621567Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 66 ], ReplyToActorId[ [2:5964:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:06.621889Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 66 ] 2026-01-07T17:53:06.621934Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 66, ReplyToActorId = [2:5964:4782], StatRequests.size() = 1 2026-01-07T17:53:07.376585Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:07.888766Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 67 ], ReplyToActorId[ [2:6004:4799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:07.889085Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 67 ] 2026-01-07T17:53:07.889128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 67, ReplyToActorId = [2:6004:4799], StatRequests.size() = 1 2026-01-07T17:53:09.210329Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 68 ], ReplyToActorId[ [2:6040:4813]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:09.210643Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 68 ] 2026-01-07T17:53:09.210694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 68, ReplyToActorId = [2:6040:4813], StatRequests.size() = 1 2026-01-07T17:53:10.088196Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:10.088407Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2026-01-07T17:53:10.088551Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-07T17:53:10.132554Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:53:10.132622Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:10.132819Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-07T17:53:10.150432Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:10.543524Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 69 ], ReplyToActorId[ [2:6076:4831]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:10.543744Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 69 ] 2026-01-07T17:53:10.543771Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 69, ReplyToActorId = [2:6076:4831], StatRequests.size() = 1 2026-01-07T17:53:10.544349Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 70 ], ReplyToActorId[ [2:6079:4834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:10.544465Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 70 ] 2026-01-07T17:53:10.544494Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 70, ReplyToActorId = [2:6079:4834], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11272 }' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 27254, MsgBus: 20895 2026-01-07T17:52:58.418285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172246164653:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.418466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.772553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.803209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.803337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.906554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.925043Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679172246164510:2081] 1767808378410715 != 1767808378410718 2026-01-07T17:52:58.926947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.961283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.425170Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.781886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.788426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.856390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.028228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.266441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.380133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.919588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185131068270:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.919738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.920060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185131068280:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.920101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.415200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.446673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.485056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.555906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.595566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.666973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.727113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.799223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189426036448:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.799293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189426036453:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.799295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.799934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189426036455:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.799988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.802583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.813123Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189426036456:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.874005Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189426036508:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.416776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172246164653:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.416836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.698915Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3404} SessionId: ydb://session/3?node_id=1&id=N2JhYjU2ZjYtMThlMmY4ZGMtYTYyOTA4MC1lYT ... ://session/3?node_id=1&id=N2JhYjU2ZjYtMThlMmY4ZGMtYTYyOTA4MC1lYTgzZjM1Ng==, ActorId: [1:7592679198015971398:2532], ActorState: ExecuteState, LegacyTraceId: 01kecsefh40drzk0hjqggjrd8f, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:159: StringOffset must be inside buffer status# INTERNAL_ERROR issues# trace_id# Trying to start YDB, gRPC: 21132, MsgBus: 10327 2026-01-07T17:53:05.558925Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679202072831075:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.559010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.709044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.716359Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.721228Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679202072831037:2081] 1767808385553179 != 1767808385553182 2026-01-07T17:53:05.752684Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.752768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.759770Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.837507Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.837546Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.837555Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.837628Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.986993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.285224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.292781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.343902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.484647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.543379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.621516Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.744916Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214957734787:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.745046Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.749321Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214957734797:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.749407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.848538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.886900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.919551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.953476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.988579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.034740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.073882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.124023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.216710Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219252702961:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.216853Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.217162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219252702967:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.217198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219252702966:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.217282Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.220808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.237245Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679219252702970:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:09.306356Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679219252703021:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.556412Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679202072831075:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.556476Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 25936, MsgBus: 61999 2026-01-07T17:52:58.357650Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172452423271:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.689031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.717065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.717163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.760460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.893218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.970421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116830Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.371993Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.802319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.822287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.875744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.038879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.196747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.286915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.810422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185337327011:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.810616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.810997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185337327021:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.811070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.424116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.450520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.485248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.517422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.548206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.591009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.636571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.725266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189632295193:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.725374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.727363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189632295199:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.727407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189632295198:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.727724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.731623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.745117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189632295202:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.837773Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189632295255:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.357762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172452423271:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.359225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.534251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679198222230160:2541], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:752: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:752: Error: Scan query should have a s ... trace_id#
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 23009, MsgBus: 4477 2026-01-07T17:53:05.577213Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679199268489571:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.577280Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.605377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.682748Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.731876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.731938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.740595Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.897067Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.897620Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.897631Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.897638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.897721Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.357227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.362206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:06.375884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.460227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.599465Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:06.642326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.709012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.881042Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679212153393291:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.881123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.885055Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679212153393301:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.885134Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.956821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.992986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.025060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.065337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.100362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.139020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.178443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.233281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.328753Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216448361469:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.328847Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.329165Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216448361475:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.329396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216448361474:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.329435Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.332883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.343986Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679216448361478:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:09.445978Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679216448361529:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.577451Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679199268489571:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.577525Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:11.096104Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808391125, txId: 281474976715673] shutting down |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 12404, MsgBus: 61960 2026-01-07T17:52:58.357863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172653962416:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.809940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.810024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.892915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.942474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.943517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.950212Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679172653962385:2081] 1767808378355761 != 1767808378355764 2026-01-07T17:52:59.116161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116292Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.216079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.373574Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.802204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.809358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.886167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.042639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.222330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.295767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.907258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185538866146:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.907418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.907742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185538866156:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.907809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.416016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.444735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.474438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.506919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.565401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.638752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.689654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.790402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189833834341:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.790451Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.790453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189833834346:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.790578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189833834348:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.790611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.794066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.803249Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189833834349:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.859529Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189833834403:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.357853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172653962416:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.357912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.803761Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808384825, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 25181, MsgBus: 20194 2026-01-07T17:53:05.612234Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679200684065482:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.612510Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.644559Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.761072Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679200684065362:2081] 1767808385607529 != 1767808385607532 2026-01-07T17:53:05.770363Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.772519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.772579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.783041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.896483Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.896552Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.896559Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.896635Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.907618Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.333255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.338163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:06.347440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.398473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.548484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.607511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.618249Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.052830Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217863936410:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.052940Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.053310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217863936420:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.053361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.113345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.144574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.176097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.206002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.235326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.268656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.309815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.360692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.442535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217863937284:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.442632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.442874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217863937290:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.442915Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.442969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679217863937289:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.446804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.458933Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679217863937293:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:09.525564Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679217863937344:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.610848Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679200684065482:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.610911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 22881, MsgBus: 13456 2026-01-07T17:52:58.371835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172004917026:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.372151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.701582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.767339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.767472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.841404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.849554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.966016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.373054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.820629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.831950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.893872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.035081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.225161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.307337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.805708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184889820669:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.805846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.806118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184889820679:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.806162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.422780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.457241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.491304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.517555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.549927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.597625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.677081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.746336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189184788852:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189184788857:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189184788858:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.749736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.760077Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189184788861:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.843920Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189184788914:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.367162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172004917026:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.367231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.412498Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679197774723807:2536], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2026-01-07T17:53:04.412844Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_s ... check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:05.357252Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.358223Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.369172Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679200333715466:2081] 1767808385263301 != 1767808385263304 2026-01-07T17:53:05.391252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.391313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.395506Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.439573Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.439594Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.439602Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.439677Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.535854Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.814244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:05.837112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:05.849371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.925249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.073852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.141995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.287773Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.240525Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213218619220:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.240624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.241300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213218619230:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.241373Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.306386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.338040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.413607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.445267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.472604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.501871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.531496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.593899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.676907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213218620103:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.677044Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.677387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213218620109:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.677389Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679213218620108:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.677426Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.681116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:08.693990Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679213218620112:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:08.771797Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679213218620163:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.267339Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679200333715572:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.267404Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:10.590912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.408649Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808391391, txId: 281474976715675] shutting down |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 16994, MsgBus: 19767 2026-01-07T17:52:58.357735Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679169505755728:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.757057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.764544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.764652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.865972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.877036Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679169505755694:2081] 1767808378353749 != 1767808378353752 2026-01-07T17:52:58.883090Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.929839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2026-01-07T17:52:59.116017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.368474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.836716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.897679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.055471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.204528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.278632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.826784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182390659457:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.826923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.827259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679182390659467:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.827325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.416463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.496567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.544448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.582541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.633622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.704284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.759045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.834652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186685627648:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.834723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.834816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186685627653:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.834867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186685627655:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.834904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.838846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.851997Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679186685627657:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.917457Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679186685627708:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.357883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679169505755728:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.357936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.510234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ;send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.594493Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:05.625078Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:05.713054Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679201943842462:2081] 1767808385594070 != 1767808385594073 2026-01-07T17:53:05.713877Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:05.714911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.714980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.745685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.867905Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.867936Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.867945Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.868023Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.880983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.297234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.316240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.377910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.499286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.595408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.599829Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.838198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214828746231:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.838280Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.838616Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679214828746241:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.838650Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.911175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.954318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.994898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.035102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.072571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.108907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.148761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.208050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.309242Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219123714409:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.309328Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.309459Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219123714414:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.309692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679219123714415:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.309722Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.312540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.322500Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679219123714418:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:09.409404Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679219123714469:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.594447Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679201943842483:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.594531Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:10.945435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.454165Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808391489, txId: 281474976710675] shutting down |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 30508, MsgBus: 14014 2026-01-07T17:53:00.337841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679179466308219:2122];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.337915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.377637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:00.624834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.627940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.628021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.690132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.700073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679179466308125:2081] 1767808380307681 != 1767808380307684 2026-01-07T17:53:00.708374Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.796401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:00.810345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.810366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.810375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.810466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:01.193643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.263046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.354098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:01.402727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.567610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.635083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.463549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679192351211890:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.463678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.464037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679192351211900:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.464113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.724725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.750835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.777888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.803464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.827994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.866305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.925343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.972434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.049008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196646180070:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.049123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.049343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196646180075:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.049388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196646180076:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.049432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.053021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:04.064096Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679196646180079:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:04.140585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679196646180130:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:05.328235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679179466308219:2122];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.328288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Tr ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:06.555904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:06.650363Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:06.652888Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679203528524641:2081] 1767808386515904 != 1767808386515907 2026-01-07T17:53:06.660318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:06.660396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:06.682004Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:06.737590Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:06.737610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:06.737617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:06.737699Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.740922Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:07.102693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:07.110108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:07.113323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.173813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.357070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.432762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.548323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.512858Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216413428407:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.512967Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.514253Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216413428417:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.514334Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.581572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.609892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.639798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.694919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.728392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.775479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.816414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.867168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.962461Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216413429291:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.962543Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.962793Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216413429297:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.962824Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.962848Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216413429296:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.968947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.984169Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679216413429300:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:10.044525Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679220708396647:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:11.461745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.527173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679203528524749:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:11.527228Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:11.769210Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808391811, txId: 281474976710675] shutting down |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 17926, MsgBus: 11275 2026-01-07T17:53:00.538888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679178409776752:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.538970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.577333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:00.798167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.798283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.800648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.874871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.915744Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.926113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.926139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.926160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.926229Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:01.110398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:01.382334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.456208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.553105Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:01.590110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.733862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.783546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.537104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191294680439:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.537227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.537555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191294680449:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.537633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.841827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.869972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.907218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.936754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.964969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.997298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.034655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.074460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.134084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195589648617:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.134166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.134292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195589648622:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.134341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195589648624:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.134384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.137498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:04.146460Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679195589648626:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:04.218361Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679195589648677:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:05.538990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679178409776752:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.539049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. Trying to start YDB, gRPC: 21788, MsgBus: 30300 2026-01-07T17:53:06.792131Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679206012574259:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:06.792328Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:06.801085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:06.863609Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:06.865346Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679206012574219:2081] 1767808386791246 != 1767808386791249 2026-01-07T17:53:06.893002Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.926873Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:06.926977Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:06.937378Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:06.965355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:06.965382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:06.965389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:06.965469Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.400244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:07.410940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:07.427472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.503991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.643270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.703481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.862856Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.819604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679218897477969:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.819675Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.819917Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679218897477979:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.819947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.881501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.915153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.967704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.999656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.037391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.074660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.110992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.152225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.220654Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679223192446152:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.220713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.220952Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679223192446157:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.220982Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679223192446158:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.221057Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.225205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.237420Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679223192446161:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:10.327665Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679223192446212:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:11.792646Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679206012574259:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:11.792716Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 29162, MsgBus: 6341 2026-01-07T17:52:58.377727Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679172616993805:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.377780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.668600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.703523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.703618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.712816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.922896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.939941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.115618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.115696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.393564Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.822114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.830458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.890687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.039769Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.203063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.284805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.702754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185501897517:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.702866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.703277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679185501897526:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.703329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.414995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.441894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.467919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.498058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.541135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.598657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.636374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.722460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189796865694:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.722561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.722931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189796865699:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.722980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679189796865700:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.723135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.727855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.745888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:53:02.746075Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679189796865703:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.815011Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679189796865754:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.377923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679172616993805:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.377986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:05.587897Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808385630, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 65394, MsgBus: 28084 2026-01-07T17:53:07.089515Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679209696754946:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.089975Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:07.099816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:07.162690Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:07.183290Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:07.183367Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:07.197671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:07.253460Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:07.253480Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:07.253486Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:07.253547Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.264996Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:07.702296Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:07.710042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.761155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.874007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.960614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.091365Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:10.294105Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222581658586:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294186Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222581658596:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.365876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.395247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.424960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.457410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.490447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.534459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.574772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.623636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.704437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222581659465:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.704556Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.704572Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222581659470:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.704777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222581659472:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.704836Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.707330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.715274Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679222581659473:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:10.770899Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679222581659525:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.087031Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679209696754946:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.087099Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6162, MsgBus: 31673 2026-01-07T17:52:58.413018Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679170220805957:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.413470Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.713485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.730143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.730244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.786214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.876893Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.955788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.421122Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.805728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.856201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.032284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.209733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.295822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.734480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183105709586:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.734627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.734843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183105709596:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.734897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.431960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.477358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.505787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.538910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.568675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.610718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.658148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.745285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187400677767:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.745348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187400677772:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187400677773:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.746519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.750275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.762595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679187400677776:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.857727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679187400677827:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.417076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679170220805957:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.417148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:05.408031Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808385413, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 30462, MsgBus: 28478 2026-01-07T17:53:06.314019Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679207401629668:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:06.314127Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:06.330120Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:06.397465Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:06.401029Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679207401629632:2081] 1767808386313023 != 1767808386313026 2026-01-07T17:53:06.432904Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:06.432997Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:06.438075Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:06.513006Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:06.513026Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:06.513032Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:06.513111Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:06.566944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:06.911054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:06.915871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:06.929197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.971780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.104665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.210582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.329675Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.662417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679220286533392:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.662490Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.662887Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679220286533401:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.662920Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.731307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.763869Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.800350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.839134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.873867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.946001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.987921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.057019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.135572Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679224581501565:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.135657Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.135719Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679224581501570:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.135853Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679224581501572:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.135884Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.139261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.149065Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679224581501574:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:10.246629Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679224581501625:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:11.314258Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679207401629668:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:11.314318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert >> KqpPragma::Warning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10196, MsgBus: 4701 2026-01-07T17:53:00.142626Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679178173503637:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.142920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.190521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:00.476126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.476209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.510087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.563494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.564463Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679178173503533:2081] 1767808380131007 != 1767808380131010 2026-01-07T17:53:00.570186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.669541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.669568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.669577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.669635Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:00.833397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:01.144674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.149581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:01.199030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.355294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.515895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.580136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.354983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191058407292:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.355126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.355523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191058407302:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.355585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.706971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.739374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.769336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.795545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.821926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.856582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.888788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.951008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.019600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195353375467:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.019716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.019835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195353375472:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.019877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679195353375474:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.019936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.023783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:04.035485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679195353375476:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:04.089750Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679195353375527:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:05.142163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679178173503637:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.143835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:06.184643Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592679203943310424:2537], TxI ... eration type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.973341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.111584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.175431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.186014Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:10.334193Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222274783739:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.334278Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.334531Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222274783749:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.334591Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.388180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.414641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.449414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.477212Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.513236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.551225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.615792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.664583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.736789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222274784618:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.736862Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.736900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222274784623:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.737037Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222274784625:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.737081Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.740082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.749042Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679222274784627:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:10.811731Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679222274784678:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.183649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679209389880030:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.183701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:12.233585Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:53:12.233846Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:53:12.233985Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:53:12.234142Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7592679230864719590:2534], Table: `/Root/Test` ([72057594046644480:45:1]), SessionActorId: [2:7592679230864719572:2534]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7592679230864719590:2534].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:53:12.234230Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7592679230864719584:2534], SessionActorId: [2:7592679230864719572:2534], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7592679230864719572:2534]. 2026-01-07T17:53:12.234452Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=NmEzNTNiMDEtZmRjN2NkMzUtMmY4MGZlZDktYjM0ZGM0YTg=, ActorId: [2:7592679230864719572:2534], ActorState: ExecuteState, LegacyTraceId: 01kecsepyaapjpsjn9h7w8t3pb, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7592679230864719585:2534] from: [2:7592679230864719584:2534] trace_id# 2026-01-07T17:53:12.234558Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7592679230864719585:2534] TxId: 281474976710673. Ctx: { TraceId: 01kecsepyaapjpsjn9h7w8t3pb, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmEzNTNiMDEtZmRjN2NkMzUtMmY4MGZlZDktYjM0ZGM0YTg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:53:12.234771Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NmEzNTNiMDEtZmRjN2NkMzUtMmY4MGZlZDktYjM0ZGM0YTg=, ActorId: [2:7592679230864719572:2534], ActorState: ExecuteState, LegacyTraceId: 01kecsepyaapjpsjn9h7w8t3pb, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertIgnore [GOOD] Test command err: Trying to start YDB, gRPC: 15883, MsgBus: 20172 2026-01-07T17:53:00.472594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679178672276506:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.472671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.744628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.744805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.749013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.798241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.825836Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679178672276265:2081] 1767808380405754 != 1767808380405757 2026-01-07T17:53:00.827624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.840129Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:53:00.840433Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T17:53:00.894202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.894236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.894247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.894357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:01.083063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:01.309253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.316683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:01.358136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.475308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:01.496390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.647928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.719772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.376848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191557180021:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.377011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.377434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191557180031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.377528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.665564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.697443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.725896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.760569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.791864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.824104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.858411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.921981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.996521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191557180903:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.996592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.996885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191557180909:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.996952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191557180908:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.997075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.000605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:04.012986Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679191557180912:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:04.088673Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679195852148259:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:05.477037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679178672276506:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.477110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;p ... eryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id#
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 Trying to start YDB, gRPC: 8332, MsgBus: 9324 2026-01-07T17:53:07.106149Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679209343534886:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.106226Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:07.116909Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:07.192918Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:07.223383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:07.223461Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:07.229467Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:07.287959Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:07.287984Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:07.287990Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:07.288065Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.291113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:07.676819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:07.684820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:07.692241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.753287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.940611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.015280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.147542Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:10.403080Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222228438585:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.403195Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.403430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222228438594:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.403480Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.471041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.502613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.530526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.558483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.594342Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.630607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.661238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.706579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.776142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222228439467:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.776198Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.776352Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222228439472:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.776391Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679222228439474:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.776416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.779101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.787430Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679222228439476:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:10.887944Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679222228439527:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.106600Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679209343534886:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.106670Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] >> KqpScripting::ScriptExplain [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 27914, MsgBus: 2977 2026-01-07T17:53:00.537388Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679178972709646:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:00.537437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:00.790237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:00.795920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:00.796087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:00.876473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:00.878465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:00.879977Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679178972709521:2081] 1767808380511139 != 1767808380511142 2026-01-07T17:53:00.985583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:00.985608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:00.985617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:00.985709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:01.056953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:01.418565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:01.471562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:53:01.556091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:01.663960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.816192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.880415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.700985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191857613284:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.701220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.705193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679191857613294:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:03.705289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.030959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.056184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.084599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.111327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.139113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.172967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.205477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.269297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.340554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196152581457:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.340666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.340837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196152581462:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.340840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679196152581463:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.340884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.344350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:04.354366Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679196152581466:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:04.454950Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679196152581517:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:05.536622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679178972709646:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:05.536691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5581, MsgBus: 23666 2026-01-07T17:53:07.127260Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679208895690471:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.127311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:07.221092Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:07.255217Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:07.264311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:07.264406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:07.269699Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:07.397171Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:07.397195Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:07.397201Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:07.397278Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.417018Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:07.758711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:07.769523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-07T17:53:07.779379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.861084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:07.992864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.051935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.219675Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:10.294377Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221780594168:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294485Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221780594178:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.294865Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.341203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.373502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.405253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.431491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.465256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.501059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.540456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.586766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.679707Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221780595046:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.679788Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221780595051:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.679800Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.679965Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221780595054:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.680000Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.683169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:10.695322Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679221780595053:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:10.763458Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679221780595106:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.127499Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679208895690471:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.127547Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] >> KqpYql::UpdatePk >> KqpYql::UpdateBadType >> KqpYql::DdlDmlMix >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpScripting::StreamExecuteYqlScriptMixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 23274, MsgBus: 3654 2026-01-07T17:52:58.371026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679171490140944:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.371591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.785055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.876482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.876640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.888418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.891260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.903768Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679171490140779:2081] 1767808378357414 != 1767808378357417 2026-01-07T17:52:58.982999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:52:59.116309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.381097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.794391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.814162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:52:59.861591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.038925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.216686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.295798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.701096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184375044549:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679184375044558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.701679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.419387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.446140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.476695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.506203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.562465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.598307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.650321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.725708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188670012729:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.725774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.726271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188670012734:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.726273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188670012735:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.726313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.729620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.739157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679188670012738:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.798404Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188670012789:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.367267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679171490140944:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.367319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6489, MsgBus: 18106 2026-01-07T17:53:05.160744Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... 40: Table profiles were not loaded 2026-01-07T17:53:05.277061Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679199759898201:2081] 1767808385158469 != 1767808385158472 2026-01-07T17:53:05.299864Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:05.299943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:05.305230Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:05.367597Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:05.367612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:05.367617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:05.367671Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:05.443500Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:05.758040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:05.762972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:05.772528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.863445Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.026201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.090902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.197975Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.592078Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679212644801951:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.592152Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.592591Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679212644801961:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.592642Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:08.666699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.700911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.746447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.789283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.822472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.878770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.924199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.981324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.074291Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216939770130:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.074364Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.074759Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216939770136:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.074801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679216939770134:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.074887Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:09.079090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:09.092141Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679216939770139:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:09.181094Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679216939770190:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:10.160563Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679199759898311:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:10.160638Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:11.367950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.915973Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808391916, txId: 281474976710676] shutting down 2026-01-07T17:53:12.557994Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808392574, txId: 281474976710680] shutting down 2026-01-07T17:53:12.879285Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808392875, txId: 281474976710684] shutting down |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard+DiscardSelectIsOn >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::EvaluateExprPgNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 25269, MsgBus: 17219 2026-01-07T17:53:01.672959Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679185903811642:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:01.674260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:01.864465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:01.920946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:01.921047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:01.922981Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:01.931337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:02.003340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:02.003359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:02.003368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:02.003488Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:02.111266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:02.449209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:02.511996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.669114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.683262Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:02.814448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.872692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.572063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679198788715341:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.572143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.572435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679198788715351:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.572476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:04.843788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.869137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.894855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.920509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.942774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:04.969814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.000558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.040473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.100117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203083683523:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.100157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.100200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203083683528:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.100230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203083683530:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.100250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.102702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:05.110340Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679203083683532:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:05.167414Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679203083683583:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:06.672356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679185903811642:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:06.672408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:07.188236Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679211673618496:2542], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial, At function: Aggregate, At tuple
:2:20: Error: At tuple /lib/yql/aggregate.yqls:650:12: Error: At function: AggregationTraits ... e: "At function: PersistableRepr" end_position { row: 2 column: 20 } severity: 1 issues { position { row: 2 column: 26 } message: "At function: Member" end_position { row: 2 column: 26 } severity: 1 issues { position { row: 2 column: 26 } message: "Member not found: _yql_partition_id" end_position { row: 2 column: 26 } severity: 1 } } } } } } } } } } tx_id# trace_id# Trying to start YDB, gRPC: 1143, MsgBus: 27154 2026-01-07T17:53:08.070594Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679215481627760:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:08.070634Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:08.090168Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:08.139369Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:08.192796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:08.192868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:08.200363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:08.251701Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:08.251720Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:08.251727Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:08.251808Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:08.251891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:08.598261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:08.606138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:08.623009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.681100Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.812690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.880770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.079168Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:11.378043Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679228366531472:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.378165Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.378479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679228366531482:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.378562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.440263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.466684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.496129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.525983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.556270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.586251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.617077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.710211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.795976Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679228366532350:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.796069Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.796310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679228366532356:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.796315Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679228366532355:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.796357Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.799799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:11.812113Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679228366532359:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:11.905648Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679228366532410:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:13.070765Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679215481627760:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.070832Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptData >> KqpScripting::ScriptingCreateAndAlterTableTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 19353, MsgBus: 28878 2026-01-07T17:53:02.491665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679188297224295:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:02.491866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:02.691707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:02.756097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:02.756204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:02.784338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:02.787812Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:02.789214Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679188297224259:2081] 1767808382490342 != 1767808382490345 2026-01-07T17:53:02.880872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:02.880897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:02.880905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:02.880988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:02.977144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:03.217463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:03.260649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.393003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.506147Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:03.526281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.586591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.422266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679201182128021:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.422379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.423111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679201182128031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.423188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.791678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.835445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.881508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.918806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.952572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.990787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.034928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.088950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.172995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205477096194:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.173091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.173473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205477096199:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.173524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205477096200:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.173554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.178381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:06.190664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679205477096203:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:06.281406Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679205477096254:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:07.493054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679188297224295:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.493147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2642, MsgBus: 14328 2026-01-07T17:53:08.941253Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:08.941322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:53:09.003993Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:09.006689Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:09.006742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:09.009182Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679216148994010:2081] 1767808388898783 != 1767808388898786 2026-01-07T17:53:09.026584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:09.115040Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:09.141782Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:09.141819Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:09.141827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:09.141912Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:09.548150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:09.567411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.658138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.817006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.893501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.020349Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:11.907272Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679229033897768:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.907357Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.907598Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679229033897778:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.907643Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.966978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.995218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.026810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.056818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.085199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.115094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.146361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.187249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.246094Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679233328865943:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.246166Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.246300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679233328865948:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.246339Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679233328865949:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.246371Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.249408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:12.258013Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679233328865952:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:12.358111Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679233328866003:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQuery >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 15145, MsgBus: 17835 2026-01-07T17:52:58.469903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679170889115943:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.473443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.892678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.927753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.927876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.981479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:59.003393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:59.056996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.116224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.116250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.116258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.116309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.493569Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.784692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.791255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-07T17:52:59.874124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.059943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.236944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.325951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.969197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183774019663:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.969335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.969647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183774019673:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.969712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.465218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.502994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.539364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.580103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.624436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.689219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.746080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.822489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188068987853:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.822564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188068987858:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.822582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.822768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679188068987860:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.822828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.826441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.837104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679188068987861:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:02.930468Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679188068987913:3781] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.470171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679170889115943:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.470232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.514166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 6: will try to initialize from file: (empty maybe) 2026-01-07T17:53:07.805581Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:07.805662Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.830399Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:08.216529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:08.229529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:08.243268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.305149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.444669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.567137Z node 2 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.591534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.995732Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221552358125:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.995802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.996006Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679221552358134:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.996037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.070238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.099625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.128763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.158158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.187043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.218497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.249376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.313088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.383067Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679225847326298:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.383148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.383157Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679225847326303:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.383407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679225847326305:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.383493Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.386705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:11.397329Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679225847326306:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:11.464810Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679225847326358:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.564873Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679208667454553:2206];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.564971Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:13.685791Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592679234437261279:2538], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:13.686359Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTcxOWM3ODEtY2Q2ZDkxNTQtNDIxOWRkZjktZGZkOTIwYmI=, ActorId: [2:7592679234437261277:2537], ActorState: ExecuteState, LegacyTraceId: 01kecsere2deq1k72ncbx1whbr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 168 } message: "At function: DataQueryBlocks" end_position { row: 1 column: 168 } severity: 1 issues { position { row: 1 column: 185 } message: "At function: TKiDataQueryBlock" end_position { row: 1 column: 185 } severity: 1 issues { position { row: 1 column: 208 } message: "At function: KiEffects" end_position { row: 1 column: 208 } severity: 1 issues { position { row: 1 column: 219 } message: "At function: KiWriteTable!" end_position { row: 1 column: 219 } severity: 1 issues { position { row: 1 column: 219 } message: "Cannot find table \'db.[/Root/ScriptingTest]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 219 } issue_code: 2003 severity: 1 } } } } } } tx_id# trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScan >> KqpYql::JsonNumberPrecision >> KqpYql::InsertCV+useSink >> KqpYql::TestUuidDefaultColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 11491, MsgBus: 6708 2026-01-07T17:53:02.748293Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679188581438391:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:02.748397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:02.970756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:02.974751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:02.974846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:03.034775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:03.035904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:03.131230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:03.131254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:03.131263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:03.131348Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:03.139529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:03.532985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:03.578766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.699273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.762037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:03.812280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.868278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.830769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679201466342098:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.830870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.831165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679201466342108:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.831195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.189719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.229923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.263739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.295666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.328949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.370304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.408425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.471699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.554947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205761310276:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.555019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.555126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205761310281:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.555166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679205761310283:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.555200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.559149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:06.571095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679205761310285:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:06.673506Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679205761310336:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:07.749024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679188581438391:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.749095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 17827, MsgBus: 28184 2026-01-07T17:53:09.217823Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:09.217950Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:53:09.304459Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679219949110177:2081] 1767808389193888 != 1767808389193891 2026-01-07T17:53:09.318103Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:09.331031Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:09.331116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:09.335561Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:09.385479Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:09.385502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:09.385509Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:09.385589Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:09.400733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:09.798411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:09.805887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:09.811873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.882402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.052763Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.119982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.261416Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:12.355836Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679232834013943:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.355942Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.356238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679232834013952:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.356285Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.407355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.434658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.484672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.506254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.529313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.554158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.580941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.618833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.683080Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679232834014819:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.683136Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.683295Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679232834014824:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.683334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679232834014825:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.683414Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.686237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:12.695835Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679232834014828:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:12.782110Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679232834014879:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> BasicStatistics::ServerlessTimeIntervals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 20511, MsgBus: 12745 2026-01-07T17:53:02.482651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679190366286917:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:02.482800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:02.798895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:02.799002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:02.807749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:02.889603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:02.890310Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679190366286879:2081] 1767808382477169 != 1767808382477172 2026-01-07T17:53:02.890423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:02.947612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:02.947640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:02.947651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:02.947727Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:03.192054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:03.307238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:03.318180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:03.372146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.491409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:03.495358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.613258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.681237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.186415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203251190640:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.186516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.186903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203251190650:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.186950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.491223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.525762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.557258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.586194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.616502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.660047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.694639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.734988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.819122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203251191522:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.819231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.819316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203251191527:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.819411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679203251191529:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.819447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.823177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:05.837438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679203251191531:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:05.888789Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679203251191582:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:07.486200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679190366286917:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.486616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:07.609551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... rations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:08.744281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:08.804602Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:08.806129Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679214426268234:2081] 1767808388715872 != 1767808388715875 2026-01-07T17:53:08.816668Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:08.816743Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:08.838286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:08.961455Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:08.961475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:08.961488Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:08.961569Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:08.995178Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:09.358525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:09.364534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:09.379050Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.458703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.613302Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.697319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.786912Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:11.631881Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679227311172002:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.631973Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.632200Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679227311172012:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.632242Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.699382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.737186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.764965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.793332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.822246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.882463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.910937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.956704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.017763Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679231606140179:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.017827Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.017938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679231606140186:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.017943Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679231606140184:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.017971Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.020764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:12.030153Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679231606140188:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:12.109296Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679231606140239:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:13.717074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679214426268262:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.717288Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:14.066995Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808394056, txId: 281474976710673] shutting down 2026-01-07T17:53:14.230999Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808394223, txId: 281474976710675] shutting down 2026-01-07T17:53:15.234553Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808395241, txId: 281474976710677] shutting down |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 24430, MsgBus: 18022 2026-01-07T17:52:58.357630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679170508373289:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.357667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.408290Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:52:58.700594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.700720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.705408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.747709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.775347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.777852Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679170508373255:2081] 1767808378353099 != 1767808378353102 
2026-01-07T17:52:59.003182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.115766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.115795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.115818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.115904Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.369061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.780195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.867828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.026415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.199287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.288582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.707187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183393277017:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.707290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.707585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679183393277027:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.707636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.419627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.451197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.484542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.515005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.568268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.600921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.640907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.743210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187688245206:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.743324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.743657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187688245212:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.743705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679187688245211:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.743810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.747788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.759689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679187688245215:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.843368Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679187688245266:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.358003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679170508373289:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.358065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:05.410430Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:759267 ... tream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704496964:2633] 2026-01-07T17:53:13.549252Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 64ms, session id ydb://session/3?node_id=2&id=MWNmMmNlM2MtNDRiNDhlOTUtNjMzYjdiOWMtN2JkMjQ5ZmQ= } 2026-01-07T17:53:13.618032Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704496995:2643] 2026-01-07T17:53:13.626908Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 67ms, session id ydb://session/3?node_id=2&id=NDI0MDFkYWYtMThmZjg0ZTctZDc2NTBkZjAtNTkzZDI3M2Y= } 2026-01-07T17:53:13.747944Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 70ms, session id ydb://session/3?node_id=2&id=YzE2MTk2NDgtNTgwZjEwZS04NTk4OWZhYi1mMDY1NDg4Mw== } 2026-01-07T17:53:13.772064Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704497026:2655] 2026-01-07T17:53:13.779650Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 73ms, session id ydb://session/3?node_id=2&id=NDRkMmJjOTItNWVjMzRlMTUtNDViMDlhNmUtNDZkN2IxN2I= } 2026-01-07T17:53:13.852717Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704497057:2664] 2026-01-07T17:53:13.862567Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 76ms, session id ydb://session/3?node_id=2&id=NzJmNzAyM2QtYjRjYjk5ZmEtOTViOGI5ZGItMmM3MDA4YTY= } 2026-01-07T17:53:13.934153Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704497091:2670] 2026-01-07T17:53:13.940464Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 79ms, session id ydb://session/3?node_id=2&id=ODQ3OWIxM2EtMmQ4YTcwZGMtYTFmYTEyZTEtNzQzN2E3Y2E= } 2026-01-07T17:53:14.019188Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679234704497132:2679] 2026-01-07T17:53:14.025484Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 82ms, session id ydb://session/3?node_id=2&id=NjBiYWExZjctZDhmNjcwOTktZDczM2FmOTktYzhlYTUxYmM= } 2026-01-07T17:53:14.108486Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464444:2686] 2026-01-07T17:53:14.114508Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 85ms, session id ydb://session/3?node_id=2&id=YmIwZjUwYTMtNDk5YTk4NmMtM2QyMTA0NzgtMWM1ZjdjZmQ= } 2026-01-07T17:53:14.201777Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464465:2695] 2026-01-07T17:53:14.259633Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 88ms, session id ydb://session/3?node_id=2&id=Zjk2ZTlhYTYtNmRlZTg5NTktZDI0YjRjYTAtODgwODcwOTk= } 2026-01-07T17:53:14.293718Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464474:2699] 2026-01-07T17:53:14.300369Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 91ms, session id ydb://session/3?node_id=2&id=ODRkNDY2YmYtNjIxNzVkY2YtZDI1MDc2MzItMzQ3N2ZiYTE= } 2026-01-07T17:53:14.395989Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464500:2710] 2026-01-07T17:53:14.403619Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=NmE0NDkzZWEtZjRlM2RmYzctN2Q5MzQ2MWEtMzE5MjRjZjI= } 2026-01-07T17:53:14.494297Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464531:2716] 2026-01-07T17:53:14.503199Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 97ms, session id ydb://session/3?node_id=2&id=NjEyZDY1N2QtNjRiMjRkM2ItNmEwYWEyODctZDNhMDc2OGI= } 2026-01-07T17:53:14.592343Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808394625, txId: 281474976710675] shutting down 2026-01-07T17:53:14.597025Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464577:2727] 2026-01-07T17:53:14.705285Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464674:2742] 2026-01-07T17:53:14.712498Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=NTQ3NGFjZWUtZDgxMmUwM2ItZWY1NjM4YTgtMTViMWEwZGE= } 2026-01-07T17:53:14.814481Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464695:2751] 2026-01-07T17:53:14.815972Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7592679238999464731:2756] TxId: 281474976710678. Ctx: { TraceId: 01kecsesf2a4c4pe25125kwgyw, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Y2U3MzhkYy1jMmIxNjFmYS03MjMwNjc2LTZlOGQ5N2Iz, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-07T17:53:14.816215Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Y2U3MzhkYy1jMmIxNjFmYS03MjMwNjc2LTZlOGQ5N2Iz, ActorId: [2:7592679238999464705:2756], ActorState: ExecuteState, LegacyTraceId: 01kecsesf2a4c4pe25125kwgyw, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-01-07T17:53:14.816899Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808394849, txId: 281474976710677] shutting down 2026-01-07T17:53:14.817214Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7592679238999464739:2760], TxId: 281474976710678, task: 4. Ctx: { CheckpointId : . TraceId : 01kecsesf2a4c4pe25125kwgyw. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=Y2U3MzhkYy1jMmIxNjFmYS03MjMwNjc2LTZlOGQ5N2Iz. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7592679238999464731:2756], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-07T17:53:14.817365Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7592679238999464740:2761], TxId: 281474976710678, task: 5. Ctx: { CheckpointId : . TraceId : 01kecsesf2a4c4pe25125kwgyw. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=Y2U3MzhkYy1jMmIxNjFmYS03MjMwNjc2LTZlOGQ5N2Iz. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7592679238999464731:2756], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-07T17:53:14.818565Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7592679238999464784:2088], tablet: [2:7592679213229658150:2311], scanId: 8, table: /Root/EightShard 2026-01-07T17:53:14.830800Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 106ms, session id ydb://session/3?node_id=2&id=YmU1M2QyYTUtZmE3Y2ZjNjAtNGExY2YxODktY2MzMzI0ZTc= } 2026-01-07T17:53:14.926254Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464795:2766] 2026-01-07T17:53:14.949659Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=ZmNhZTM5NmMtZDNjM2RlOTctODI1NjQ0OWYtMTVhYjhhYjg= } 2026-01-07T17:53:15.041287Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808395073, txId: 281474976710680] shutting down 2026-01-07T17:53:15.042438Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679238999464816:2775] 2026-01-07T17:53:15.097594Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 112ms, session id ydb://session/3?node_id=2&id=NGViZDU2MzgtZjhhNWM4YTEtNTJlODJmNGMtNDdlOGFhMGU= } 2026-01-07T17:53:15.161017Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679243294432211:2791] 2026-01-07T17:53:15.169232Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=Y2E2OWE4MWMtY2NhNGQ0OTAtMzYzOGJmNDQtOWEyOTdiODY= } 2026-01-07T17:53:15.271940Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808395304, txId: 281474976710682] shutting down 2026-01-07T17:53:15.403368Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679243294432330:2815] 2026-01-07T17:53:15.413451Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 121ms, session id ydb://session/3?node_id=2&id=OWYzODEwNTEtYTcxNTVlNWUtNDE2MTE5M2EtZjkwZTdlY2U= } 2026-01-07T17:53:15.525733Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808395549, txId: 281474976710684] shutting down 2026-01-07T17:53:15.656920Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7592679243294432452:2841] 2026-01-07T17:53:15.666519Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=ZDgyMTdjOS00MmYwM2YzZi0xNGE3MTBmLWJlNzBjYTVm } 2026-01-07T17:53:15.752289Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808395787, txId: 281474976710686] shutting down |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:47.440759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:47.514768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:47.520693Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:47.520921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:47.521029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:47.814154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:47.895745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:47.895847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:47.930073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:47.997101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:48.615566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:48.616369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:48.616401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:48.616422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:48.616567Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:48.679968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:49.179215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:51.429030Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:51.436428Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:51.441090Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:51.472770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:51.472883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.511295Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:51.513124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.702385Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:51.702497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:51.703902Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.704499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.705055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.705900Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.706023Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.706313Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.706431Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.706538Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.706724Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:51.722017Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:51.924545Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:51.951600Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:51.951685Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:51.986476Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:51.987935Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:51.988173Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:51.988247Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:51.988304Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:51.988363Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:51.988410Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:51.988459Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:51.990384Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:51.991499Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:51.993906Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:51:52.004130Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:52.004199Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:52.004283Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:51:52.011380Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:52.011504Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:52.081778Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2212:2650] 2026-01-07T17:51:52.082098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2212:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:52.082944Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2214:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:52.122244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:52.139671Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:52.139830Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:52.152208Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:52.240405Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:52.379520Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:51:52.627738Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:52.757377Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:52.757462Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:53.473935Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... 7Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-07T17:52:27.118864Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5603:4373], StatRequests.size() = 1 2026-01-07T17:52:27.576834Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-07T17:52:27.576904Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 26.258000s, at schemeshard: 72075186224037905 2026-01-07T17:52:27.577245Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:27.591571Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:27.635153Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:28.406605Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5646:4395]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:28.406976Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-07T17:52:28.407029Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5646:4395], StatRequests.size() = 1 2026-01-07T17:52:29.586970Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5675:4407]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:29.587316Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-07T17:52:29.587361Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5675:4407], StatRequests.size() = 1 2026-01-07T17:52:30.002742Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:52:30.003262Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: 
EvPropagateStatistics, node id: 1 cookie: 7 2026-01-07T17:52:30.003663Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-07T17:52:30.003757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:52:30.049414Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:30.049487Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:30.049766Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:30.063870Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:30.664678Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5708:4422]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:30.665118Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-07T17:52:30.665175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5708:4422], StatRequests.size() = 1 2026-01-07T17:52:30.665725Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5710:4424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:30.670070Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-07T17:52:30.670131Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5710:4424], StatRequests.size() = 1 2026-01-07T17:52:35.605055Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:35.605156Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:35.605510Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:35.619679Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:40.969213Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:40.969276Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:40.969519Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:40.982846Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:46.809882Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete 
stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:46.809962Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:46.810282Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:46.825038Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:50.176321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:52:51.681615Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-07T17:52:51.681704Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 27.538000s, at schemeshard: 72075186224037899 2026-01-07T17:52:51.682053Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:51.695643Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:52.674214Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:52:52.674287Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:52:52.674686Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:52:52.689265Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:52:56.563838Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:52:56.563927Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:56.563974Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:52:56.564007Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:52:58.613861Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-07T17:52:58.613934Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 26.015000s, at schemeshard: 72075186224037905 2026-01-07T17:52:58.614195Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-07T17:52:58.634631Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:00.184829Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: 
SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:53:00.184900Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:00.185196Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:53:00.206805Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:09.312542Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-07T17:53:09.312767Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2026-01-07T17:53:09.313188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-07T17:53:09.313384Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-07T17:53:09.372480Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:53:09.372562Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:09.372824Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:53:09.389170Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:15.661127Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-07T17:53:15.661195Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:15.661418Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-07T17:53:15.678362Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 19526, MsgBus: 4989 2026-01-07T17:53:14.479248Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679241865810209:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:14.484387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:14.714265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:14.717756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:14.717876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:14.719929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:14.805576Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679241865810177:2081] 1767808394476791 != 1767808394476794 2026-01-07T17:53:14.814039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:14.868259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:14.868304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:14.868320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:14.868423Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:14.928317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:15.278272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:15.285258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:15.485152Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.551073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679254750712952:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.551198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.551470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679254750712962:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.551510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.783849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.927740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679254750713062:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.927821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.927973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679254750713067:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.928273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679254750713069:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.928310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.931747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:17.943490Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679254750713070:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:53:18.066474Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259045680418:2598] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] >> KqpYql::TestUuidDefaultColumn [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> KqpYql::UpdatePk [GOOD] >> TSchemeShardAuditSettings::AlterSubdomain |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> TSchemeShardAuditSettings::CreateExtSubdomain >> KqpYql::UpdateBadType [GOOD] >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpYql::EvaluateExprPgNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 5611, MsgBus: 65177 2026-01-07T17:53:08.575106Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679216169294907:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:08.575593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:08.797013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:08.837605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:08.837697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:08.892324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:08.897870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:09.052852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:09.089627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:09.089649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:09.089657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:09.089741Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:09.492561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:09.498347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:09.554265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.574492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.692245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.877458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.956492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.697798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679229054198444:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.697895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.698335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679229054198454:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.698370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.978894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.008654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.036197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.062527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.090392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.121931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.151132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.196341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.263684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233349166626:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.263777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.264006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233349166631:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.264034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233349166632:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.264051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.268006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:12.277236Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679233349166635:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:12.351751Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679233349166686:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:13.576055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679216169294907:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.576129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 14122, MsgBus: 7343 2026-01-07T17:53:14.566659Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679241803008743:2067] ... R WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:14.741606Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:14.741683Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:14.829580Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:15.142953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:15.163773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.217343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.341024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.408909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.578336Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.696682Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679254687912477:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.696767Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.697026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679254687912487:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.697074Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.767218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.797214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.822870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.849039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.872823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.897892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.922086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.983909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.042236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679258982880660:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.042318Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.042400Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679258982880665:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.042462Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679258982880667:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.042506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.045701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:18.053668Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679258982880669:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:18.143094Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679258982880720:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:19.564609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679241803008743:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:19.564664Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:19.629394Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592679263277848318:2535], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-07T17:53:19.629851Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZDc1ODk1ZTgtZjAwN2FiMDktNzFmNWQ4My0xMTI0MGUxMA==, ActorId: [2:7592679263277848310:2530], ActorState: ExecuteState, LegacyTraceId: 01kecsey7m2tkvt7w3z09fj9t9, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 26 } message: "At function: KiWriteTable!" end_position { row: 5 column: 26 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert type: Struct<\'Key\':Uint64,\'Value\':Uint64> to Struct<\'Key\':Uint64?,\'Value\':String?>" end_position { row: 6 column: 27 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert \'Value\': Uint64 to Optional" end_position { row: 6 column: 27 } severity: 1 } } issues { position { row: 6 column: 27 } message: "Failed to convert input columns types to scheme types" end_position { row: 6 column: 27 } issue_code: 2031 severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23036, MsgBus: 30689 2026-01-07T17:53:16.723386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679246432581113:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.723494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.769146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:17.089054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:17.170493Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679246432581084:2081] 1767808396720158 != 1767808396720161 2026-01-07T17:53:17.184617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:17.184888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:17.186809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:17.187075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:17.295099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:17.303508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:17.303525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:17.303530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:17.303592Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:17.690699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.729248Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:19.381142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259317483862:2323], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.381278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.381652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259317483872:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.381732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.697353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.795510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259317483968:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.795586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.795789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259317483973:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.795800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259317483974:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.795826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.799615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.808623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679259317483977:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:53:19.945000Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259317484028:2595] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::Discard+DiscardSelectIsOn [GOOD] >> KqpYql::Discard-DiscardSelectIsOn >> KqpScripting::ExecuteYqlScriptPg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 16683, MsgBus: 20022 2026-01-07T17:53:07.299511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679211141382131:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.300761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:07.597022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:07.612476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:07.612569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:07.677025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679211141382074:2081] 1767808387291270 != 1767808387291273 2026-01-07T17:53:07.687983Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:07.688340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:07.844734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:07.844765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:07.844771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:07.844860Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:07.887999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:08.272506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:08.300991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:08.314624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.352024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.458006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.622797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.693012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.525903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679224026285841:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.526011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.530496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679224026285851:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.530602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:10.856082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.888258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.920306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.947087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.977758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.009634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.043529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.109083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.172443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679228321254016:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.172516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.172615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679228321254021:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.172668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679228321254023:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.172730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.175886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:11.185264Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679228321254025:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:11.278912Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679228321254076:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.299069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679211141382131:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.299134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:12.675292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... gRPC: 8395, MsgBus: 62080 2026-01-07T17:53:13.745102Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679236532106710:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.745209Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:13.837746Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:13.838257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:13.879855Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:13.879943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:13.889492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:13.939545Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:13.939566Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:13.939574Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:13.939660Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:14.091966Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:14.320692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:14.343796Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.400222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.532036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.595159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.767633Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.747195Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679249417010395:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.747276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.747615Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679249417010405:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.747651Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.826487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.869432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.914389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.956369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.999838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.039407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.122481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.201542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.284328Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679253711978588:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.284414Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.284770Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679253711978593:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.284823Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679253711978594:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.284885Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.288366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:17.314504Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679253711978597:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:17.395221Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679253711978648:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:18.745139Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679236532106710:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:18.745207Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:18.979533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.747142Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399784, txId: 281474976715675] shutting down |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::FromBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 14799, MsgBus: 23839 2026-01-07T17:53:15.122366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679242738109729:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.122482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.337155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:15.345680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.345812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.420275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.478093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.485026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679242738109698:2081] 1767808395120968 != 1767808395120971 2026-01-07T17:53:15.545488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:15.545522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:53:15.545532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:15.545680Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:15.579554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:15.973748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.036437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.129290Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.198896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.369128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.446674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.393557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255623013458:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.393702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.394057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255623013468:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.394117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.668149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.692645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.714939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.740984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.767866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.800454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.861113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.900616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.968721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255623014337:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.968833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.969183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255623014343:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.969227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.969231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255623014342:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.972795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:18.982448Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679255623014346:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:19.055360Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259917981693:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.122783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679242738109729:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.122852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:20: Warning: At lambda, At function: AsStruct, At tuple
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 24734, MsgBus: 17035 2026-01-07T17:53:15.159048Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679245372448642:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.159099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.390214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:15.395368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.395489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.437609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.500953Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.618508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:15.618563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:15.618632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:15.618828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:15.619866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.036562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.045722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:16.097112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.169177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.239753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.417601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.493942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.370699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679258257352358:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.370800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.371091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679258257352368:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.371137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.645116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.672700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.699662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.727367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.756411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.787519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.842170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.879102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.943407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679258257353240:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.943518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.943808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679258257353246:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.943867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679258257353245:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.943929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.947497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:18.962851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679258257353249:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:19.024673Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679262552320596:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.159438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679245372448642:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.159489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 21234, MsgBus: 26605 2026-01-07T17:52:58.359534Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679173337884531:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:58.359644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:58.673351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:58.815046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:58.815163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:58.894489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:58.912095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:58.915165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679173337884495:2081] 1767808378353126 != 1767808378353129 2026-01-07T17:52:58.967882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:59.117388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:59.117406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:59.117411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:59.117472Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:59.371080Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:59.781384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:59.864108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.039696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.207887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:00.280730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:01.723042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186222788258:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.723159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.723456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679186222788267:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:01.723500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.389357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.460732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.491774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.522110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.555577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.644941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.694682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.743643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:02.827398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190517756444:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.827472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.827535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190517756449:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.827680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679190517756451:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.827704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:02.830967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:02.840315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679190517756452:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:02.897972Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679190517756504:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:03.359959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679173337884531:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:03.360010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:04.821074Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808384804, txId: 281474976710674] shutting down 2026-01-07T17:53:04.821936Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [s ... :53:16.576629Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 106ms, session id ydb://session/3?node_id=2&id=NmE4M2EzZGUtNjlkNWJkMGYtZGYxNzQzNDctYWNlMzk4Yzc= } 2026-01-07T17:53:16.628571Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=OTk0Y2QwZWYtNTBkN2RhOGYtMjAxNDNkZDYtZDBlNTg0OTE= } 2026-01-07T17:53:16.741117Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 112ms, session id ydb://session/3?node_id=2&id=MWNmZTNhZTItZDkxMjM1ZDYtNWFkYWJhMmQtYjhkOWRiN2Q= } 2026-01-07T17:53:16.785520Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808396802, txId: 281474976710716] shutting down 2026-01-07T17:53:16.786110Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808396802, txId: 281474976710715] shutting down 2026-01-07T17:53:16.871882Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=NzAwMmJkZDMtYmY3YTE2YzQtOTI2MjZmNzMtNDBiOWFjMzQ= } 2026-01-07T17:53:16.987239Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397012, txId: 281474976710720] shutting down 2026-01-07T17:53:16.987774Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397012, txId: 281474976710719] shutting down 2026-01-07T17:53:16.991274Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 118ms, session id ydb://session/3?node_id=2&id=ZWRkOTk4MGYtODM1NGJkNC1lNjJmMzM0YS00MTQ3NDg2OQ== } 2026-01-07T17:53:17.112043Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 121ms, session id ydb://session/3?node_id=2&id=OWI1N2Y2NmQtNWMyNTVhN2EtNjJjYTg2N2ItMTM5MzBjZmQ= } 2026-01-07T17:53:17.235108Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397264, txId: 281474976710724] shutting down 2026-01-07T17:53:17.235231Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397264, txId: 281474976710723] shutting down 2026-01-07T17:53:17.374638Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=NTVjNmVjZTctYTQ1MzFiNTQtM2Q5Zjg3ZDMtNWZhNzUzMDc= } 2026-01-07T17:53:17.449546Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397481, txId: 281474976710727] shutting down 2026-01-07T17:53:17.507282Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 130ms, session id ydb://session/3?node_id=2&id=NTc2ZjY0ZGItMmYyMDU3NmItNTlhMWJiODgtNThiNGRiMGE= } 2026-01-07T17:53:17.579047Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397607, txId: 281474976710729] shutting down 2026-01-07T17:53:17.642968Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 133ms, session id ydb://session/3?node_id=2&id=YTgyZTg3MGYtYTc2OGJiZTAtZWNmYjdmY2ItZGJiYTNkNzA= } 2026-01-07T17:53:17.720103Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397747, txId: 281474976710731] shutting down 2026-01-07T17:53:17.785750Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 136ms, session id ydb://session/3?node_id=2&id=YTc5MWU3ZmEtYzQ5ZTVjZTQtMWE3Mjc5MGYtZGFhNWUxYg== } 2026-01-07T17:53:17.847389Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808397880, txId: 281474976710733] shutting down 2026-01-07T17:53:17.939200Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 139ms, session id ydb://session/3?node_id=2&id=ZTliMTFlYWQtNzE0NjUwZTgtMjdjZmJlZDEtY2JiMjc1MzE= } 2026-01-07T17:53:18.018540Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398048, txId: 281474976710735] shutting down 2026-01-07T17:53:18.018554Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398048, txId: 281474976710736] shutting down 2026-01-07T17:53:18.185511Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 145ms, session id ydb://session/3?node_id=2&id=NWZlZWU5NDUtZmNjOTBkYmMtZWY3MGY2OTMtNjk4ZWEyNDQ= } 2026-01-07T17:53:18.232338Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398265, txId: 281474976710739] shutting down 2026-01-07T17:53:18.324449Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 148ms, session id ydb://session/3?node_id=2&id=YTBhMjE4ZTUtYjM4MWJiMjYtMzYzZjNmOTUtYjhlYzcxZGQ= } 2026-01-07T17:53:18.375402Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398405, txId: 281474976710741] shutting down 2026-01-07T17:53:18.477794Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 151ms, session id ydb://session/3?node_id=2&id=NWFhNzUyZWMtMTU4MWMzMzAtMzEzM2Y5NmQtY2E5YzcxMTI= } 2026-01-07T17:53:18.479189Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398510, txId: 281474976710743] shutting down 2026-01-07T17:53:18.635338Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 154ms, session id ydb://session/3?node_id=2&id=OTJiOTJjMWMtZDkxNDMwNmUtODU5ZDk2MDItYzkxZTA0OGI= } 2026-01-07T17:53:18.663390Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398699, txId: 281474976710745] shutting down 2026-01-07T17:53:18.792951Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 157ms, session id ydb://session/3?node_id=2&id=NTM4OTAxMGYtMmVhZGNkYjUtOGE4YzFhNDYtNTMyZWQ4Yzg= } 2026-01-07T17:53:18.842176Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808398867, txId: 281474976710747] shutting down 2026-01-07T17:53:18.956247Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 160ms, session id ydb://session/3?node_id=2&id=OTg4OWQ0NGYtYzZhOWNhNjQtMTg4NzE0OGUtZTQwMDUwYmQ= } 2026-01-07T17:53:19.037237Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399070, txId: 281474976710750] shutting down 2026-01-07T17:53:19.037835Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399070, txId: 281474976710749] shutting down 2026-01-07T17:53:19.220799Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 166ms, session id ydb://session/3?node_id=2&id=ZmFjNTQ5MTUtODkzNDA1Y2QtNmRmNDcyMjItNTAxNzY3NDM= } 2026-01-07T17:53:19.281165Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399308, txId: 281474976710753] shutting down 2026-01-07T17:53:19.390249Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 169ms, session id ydb://session/3?node_id=2&id=NTY1ODQ2NGQtZGE2ZTIyZTItNDE4ZTRlZjYtM2M3NWUyMzY= } 2026-01-07T17:53:19.406336Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399441, txId: 281474976710755] shutting down 2026-01-07T17:53:19.565150Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399602, txId: 281474976710757] shutting down 2026-01-07T17:53:19.567752Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 172ms, session id ydb://session/3?node_id=2&id=YjVjMmNkOWUtZTc3ZjdmMzQtODI0OGU4ZDEtOGNkZmVkMQ== } 2026-01-07T17:53:19.742706Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 175ms, session id ydb://session/3?node_id=2&id=NWI4MzU3MWEtMzY0YzJkODEtM2U4M2EwZmEtYjgzODczN2Q= } 2026-01-07T17:53:19.773275Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399805, txId: 281474976710759] shutting down 2026-01-07T17:53:19.928535Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 178ms, session id ydb://session/3?node_id=2&id=MjQ2MTQ4OWMtZDkxMmViNTMtM2RhYTNkLTJmOGVjYTc= } 2026-01-07T17:53:19.948028Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808399980, txId: 281474976710761] shutting down 2026-01-07T17:53:20.086423Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808400120, txId: 281474976710763] shutting down |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::JsonCast >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprPgNull [GOOD] Test command err: Trying to start YDB, gRPC: 8806, MsgBus: 30012 2026-01-07T17:53:09.680410Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679216538359498:2203];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:09.680468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:09.912228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:09.913982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:09.914075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:09.969529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:10.011462Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:10.014744Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679216538359331:2081] 1767808389661286 != 1767808389661289 2026-01-07T17:53:10.105102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:10.105123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:10.105138Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:10.105251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:10.206142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:10.491772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:10.544557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.666514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.753838Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:10.788218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.843703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.639773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679229423263095:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.639880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.640183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679229423263105:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.640221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.991751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.023495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.053313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.085200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.118896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.153143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.208328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.247611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:13.314806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233718231276:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:13.314879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:13.315106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233718231281:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:13.315143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:13.315176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679233718231282:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:13.318608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:13.327681Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679233718231285:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:13.391013Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679233718231336:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:14.679433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679216538359498:2203];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:14.679496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7972, MsgBus: 7208 2026-01-07T17:53:15.970052Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679245357217057:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.970165Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.990388Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.067510Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.070245Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679245357217017:2081] 1767808395967685 != 1767808395967688 2026-01-07T17:53:16.105414Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.105530Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.118832Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.182709Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.209440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.209463Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.209472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.209540Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:16.649878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:16.657437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:16.662537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.745861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.880344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.949690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.987427Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:19.106906Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679262537088059:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.106973Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.107226Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679262537088068:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.107292Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.162811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.186331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.213337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.237959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.263717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.300506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.328629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.365830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.431179Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679262537088935:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.431270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.431450Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679262537088940:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.431522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679262537088941:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.431582Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.434650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.444013Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679262537088944:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:19.513292Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679262537088995:3755] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.970359Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679245357217057:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.970465Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 14667, MsgBus: 6689 2026-01-07T17:53:08.921420Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679213360616729:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:08.921712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:09.151245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:09.153860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:09.153924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:09.170051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:09.314329Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679213360616699:2081] 1767808388914951 != 1767808388914954 2026-01-07T17:53:09.314793Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:09.355552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:09.355592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:09.355601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:09.355683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:09.368104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:09.729227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:09.735432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:09.800657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.931928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:09.945096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.092344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.154113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.856223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679226245520459:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.856295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.856503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679226245520469:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.856532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.176236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.203256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.232072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.260699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.286622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.315540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.361818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.397762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:12.458823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679230540488637:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.458968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.459063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679230540488642:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.459177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679230540488644:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.459218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:12.462744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:12.473679Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679230540488646:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:12.575672Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679230540488697:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:13.917798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679213360616729:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.917865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:14.076979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.601159Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808394639, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 25667, MsgBus: 12239 2026-01-07T17:53:15.514919Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679246202966091:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.515074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.533648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:15.618338Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679246202966062:2081] 1767808395513822 != 1767808395513825 2026-01-07T17:53:15.625175Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.635438Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.635522Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.663722Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.746618Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:15.746643Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:15.746650Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:15.746738Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:15.752456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.202938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.220949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.286757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.483734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.522022Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.545142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.026789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679263382837131:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.026863Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.027077Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679263382837140:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.027113Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.086656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.114234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.142490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.167608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.192559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.218026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.243944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.279970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.343628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679263382838010:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.343711Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679263382838015:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.343723Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.343945Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679263382838018:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.344035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.346622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.355063Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679263382838017:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:19.409692Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679263382838070:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.514930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679246202966091:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.515017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:21.660990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:21.661105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:21.661214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:21.661252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:21.661282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:21.661365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:21.662354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:21.663500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:21.753838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: 
Cannot subscribe to console configs 2026-01-07T17:53:21.753898Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.772040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:21.772353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:21.772567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:21.781772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:21.782152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:21.783081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:21.786145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:21.796579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.798075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:21.817932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:21.818188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:21.818239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:21.819547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:21.985218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.989977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pp:665: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2026-01-07T17:53:22.937598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-07T17:53:22.938037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:22.938159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:22.938200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-07T17:53:22.938258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:22.938287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:53:22.938375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 130 2026-01-07T17:53:22.938523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:22.938571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:53:22.939492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.941877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2026-01-07T17:53:22.946039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:22.946097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:22.946260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:53:22.946373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2026-01-07T17:53:22.946401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-07T17:53:22.946444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-07T17:53:22.946731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-07T17:53:22.946784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2026-01-07T17:53:22.946846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-07T17:53:22.946870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.946906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-07T17:53:22.946935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.946961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2026-01-07T17:53:22.946986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.947011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-07T17:53:22.947032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-07T17:53:22.947089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:53:22.947120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2026-01-07T17:53:22.947144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 30 2026-01-07T17:53:22.947178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-07T17:53:22.947858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.947930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.947957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 
2026-01-07T17:53:22.947984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-07T17:53:22.948014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:53:22.950820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.950926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.950968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:53:22.951004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-07T17:53:22.951037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:53:22.951146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-07T17:53:22.951448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:22.951498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:53:22.951577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:53:22.951844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:22.951886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:53:22.951945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:22.960051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.964211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.964301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:22.964378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-07T17:53:22.964655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-07T17:53:22.964680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-07T17:53:22.965144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-07T17:53:22.965221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:53:22.965245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1065:3054] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:21.660978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:21.661076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:21.661174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:21.661211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:21.661240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:21.661303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:21.662256Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:21.663520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:21.763072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:21.763130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.778901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:21.779231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:21.779414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:21.785610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:21.785883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:21.786497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:21.786753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:21.796764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.798078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:21.817936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:21.818186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:21.818241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:21.819358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:21.984408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.989973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.990932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.991020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.991103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.991233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-07T17:53:22.932596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:22.932689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:22.932730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-07T17:53:22.932813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:22.932848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-07T17:53:22.932878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 134 2026-01-07T17:53:22.934475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.934734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.936542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-07T17:53:22.936603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:53:22.936702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 134 -> 135 2026-01-07T17:53:22.936853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:22.936910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 FAKE_COORDINATOR: Erasing txId 112 2026-01-07T17:53:22.938611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:22.938663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:22.938785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 
43] 2026-01-07T17:53:22.938886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:22.938917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-07T17:53:22.938979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-07T17:53:22.939222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-07T17:53:22.939260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 112:0 ProgressState 2026-01-07T17:53:22.939290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 135 -> 240 2026-01-07T17:53:22.939958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.940041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.940068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:53:22.940101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-07T17:53:22.940197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:53:22.940871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.941010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-07T17:53:22.941047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-07T17:53:22.941073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-07T17:53:22.941115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:53:22.941179Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2026-01-07T17:53:22.943963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-07T17:53:22.944021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 112:0 ProgressState 2026-01-07T17:53:22.944103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-07T17:53:22.944139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.944170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-07T17:53:22.944191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.944234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2026-01-07T17:53:22.944267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-07T17:53:22.944301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-07T17:53:22.944337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-07T17:53:22.944399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-07T17:53:22.944964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:22.945013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:53:22.945064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-07T17:53:22.945825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:22.945882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:53:22.945983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:22.946345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.946652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-07T17:53:22.948602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:22.948693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-07T17:53:22.949053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-07T17:53:22.949088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-07T17:53:22.949572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-07T17:53:22.949648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-07T17:53:22.949731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1061:3050] TestWaitNotification: OK eventTxId 112 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 23126, MsgBus: 16331 2026-01-07T17:53:02.633454Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679189877250062:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:02.633541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:02.917029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:03.016487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:03.018192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:03.018287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:03.025347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:03.107746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:03.107783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:03.107798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:03.107885Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:03.190071Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:03.503023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:03.546712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.658638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.662440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:03.788649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:03.843396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:05.879436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679202762153766:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.879552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.879906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679202762153775:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:05.879947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.272238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.309578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.347203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.383109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.453964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.503706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.547010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.591605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:06.674227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679207057121946:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.674345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.676706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679207057121951:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.676762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679207057121952:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.676800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:06.680771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:06.692256Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679207057121955:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:06.787375Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679207057122006:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:07.634361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679189877250062:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.634476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:08.225120Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7592679215647056891:2530] 2026-01-07T17:53:08.292327Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7592679215647056898:2533] 2026-01-07T17:53:08.292852Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7592679215647056906:2536] 2026-01-07T17:53:08.306003Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7592679215 ... 0: SelfId: [2:7592679266622480504:2726]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7592679240852674140:2313] 2026-01-07T17:53:21.539979Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037892, step: 1767808400281 2026-01-07T17:53:21.540029Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480505:2727]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7592679240852674141:2314] 2026-01-07T17:53:21.540277Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037894, step: 1767808400484 2026-01-07T17:53:21.540329Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037891, step: 1767808400281 2026-01-07T17:53:21.540375Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.540428Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481110:2749]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7592679240852674139:2312] 2026-01-07T17:53:21.540488Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480504:2726]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7592679240852674140:2313] 2026-01-07T17:53:21.540538Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.540828Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037894, step: 1767808400484 2026-01-07T17:53:21.540903Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.540979Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481110:2749]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7592679240852674139:2312] 2026-01-07T17:53:21.541055Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.541307Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037892, step: 1767808400281 2026-01-07T17:53:21.541370Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.541419Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480505:2727]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7592679240852674141:2314] 2026-01-07T17:53:21.541545Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.578060Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 121ms, session id ydb://session/3?node_id=2&id=ZWNhZTRmMDMtNjNjOGZiNWMtYWZjOTE1NTItN2Y2MjRiZQ== } 2026-01-07T17:53:21.578203Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037892, step: 1767808400281 2026-01-07T17:53:21.578321Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037894, step: 1767808400484 2026-01-07T17:53:21.578423Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480505:2727]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7592679240852674141:2314] 2026-01-07T17:53:21.578525Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481110:2749]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7592679240852674139:2312] 2026-01-07T17:53:21.578674Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037891, step: 1767808400281 2026-01-07T17:53:21.578737Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480504:2726]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7592679240852674140:2313] 2026-01-07T17:53:21.578963Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037890, step: 1767808400281 2026-01-07T17:53:21.579023Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480503:2725]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7592679240852674126:2310] 2026-01-07T17:53:21.579367Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.579429Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.579590Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037892, step: 1767808400281 2026-01-07T17:53:21.581917Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.582000Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.582086Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037894, step: 1767808400484 2026-01-07T17:53:21.582148Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037891, step: 1767808400281 2026-01-07T17:53:21.582219Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037890, step: 1767808400281 2026-01-07T17:53:21.582288Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480505:2727]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7592679240852674141:2314] 2026-01-07T17:53:21.582510Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481110:2749]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7592679240852674139:2312] 2026-01-07T17:53:21.582563Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480504:2726]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7592679240852674140:2313] 2026-01-07T17:53:21.582625Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480503:2725]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7592679240852674126:2310] 2026-01-07T17:53:21.583442Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037895, step: 1767808400484 2026-01-07T17:53:21.583509Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037892, step: 1767808400281 2026-01-07T17:53:21.583557Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710677. Snapshot is not valid, tabletId: 72075186224037894, step: 1767808400484 2026-01-07T17:53:21.583600Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037890, step: 1767808400281 2026-01-07T17:53:21.583642Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:601: TxId: 281474976710674. Snapshot is not valid, tabletId: 72075186224037891, step: 1767808400281 2026-01-07T17:53:21.583712Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481111:2750]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7592679240852674136:2311] 2026-01-07T17:53:21.583808Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480505:2727]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037892, actor_id: [2:7592679240852674141:2314] 2026-01-07T17:53:21.583892Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622481110:2749]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7592679240852674139:2312] 2026-01-07T17:53:21.583955Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480503:2725]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037890, actor_id: [2:7592679240852674126:2310] 2026-01-07T17:53:21.584008Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:170: SelfId: [2:7592679266622480504:2726]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037891, actor_id: [2:7592679240852674140:2313] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> KqpYql::Closure [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:21.660973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:21.661096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:21.661200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:21.661250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:21.661280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:21.661368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:21.662331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:21.663530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:21.751649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:21.751714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.768305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:21.768630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:21.771136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:21.779905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-07T17:53:21.780358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:21.782364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:21.786145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:21.796129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.798049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:21.817911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:21.817988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:21.818161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:21.818215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:21.819372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:21.990773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.991819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.991996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:53:21.992693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.992947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
65: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2026-01-07T17:53:24.542038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-07T17:53:24.542937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:24.543047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:24.543089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-07T17:53:24.543134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:24.543168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-07T17:53:24.543271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 130 2026-01-07T17:53:24.543441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:24.543495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-07T17:53:24.544681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.548303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2026-01-07T17:53:24.550085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:24.550129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:24.550282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-07T17:53:24.550401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2026-01-07T17:53:24.550435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-07T17:53:24.550471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-07T17:53:24.550845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-07T17:53:24.550897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2026-01-07T17:53:24.550966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-07T17:53:24.550995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.551029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-07T17:53:24.551056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.551086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2026-01-07T17:53:24.551115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.551144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-07T17:53:24.551191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-07T17:53:24.551262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-07T17:53:24.551299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2026-01-07T17:53:24.551327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 106 2026-01-07T17:53:24.551352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 62], 18446744073709551615 2026-01-07T17:53:24.552061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.552164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.552201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 
2026-01-07T17:53:24.552242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-07T17:53:24.552278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:53:24.553054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.553133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.553166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2026-01-07T17:53:24.553192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-07T17:53:24.553231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-07T17:53:24.553310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2026-01-07T17:53:24.553726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:24.553770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:24.553856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-07T17:53:24.554794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:24.554851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:24.554914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:24.557502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.557613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.559371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:24.559487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-07T17:53:24.560655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-07T17:53:24.560696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-07T17:53:24.562050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-07T17:53:24.562144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-07T17:53:24.562183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2892:4880] TestWaitNotification: OK eventTxId 175 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:21.660967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:21.661076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:21.661171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:21.661206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:21.661237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:21.661289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:21.662274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:21.663518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:21.762766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:21.762826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.776193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:21.776498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:21.776672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:21.782341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:21.782696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:21.783462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:21.786220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:21.796280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.798091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:21.817878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:21.817964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:21.818177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:21.818232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:21.819373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:21.997083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.998977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.999065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-07T17:53:24.638400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:24.638489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:24.638523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-07T17:53:24.638608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:24.638635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-07T17:53:24.638669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 134 2026-01-07T17:53:24.639414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.640375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.641806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-07T17:53:24.641863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:53:24.641981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 134 -> 135 2026-01-07T17:53:24.642161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:24.642249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 FAKE_COORDINATOR: Erasing txId 175 2026-01-07T17:53:24.643982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:24.644028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:24.644160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 
2026-01-07T17:53:24.644253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:24.644280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-07T17:53:24.644312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-07T17:53:24.644670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-07T17:53:24.644713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 175:0 ProgressState 2026-01-07T17:53:24.644738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 135 -> 240 2026-01-07T17:53:24.645228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.645285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.645309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-07T17:53:24.645330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-07T17:53:24.645353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:53:24.646003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.646068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:24.646091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-07T17:53:24.646111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-07T17:53:24.646132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-07T17:53:24.646186Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-07T17:53:24.648863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-07T17:53:24.648921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-07T17:53:24.649022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-07T17:53:24.649055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.649088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-07T17:53:24.649113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.649143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-07T17:53:24.649177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:24.649207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-07T17:53:24.649233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-07T17:53:24.649312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-07T17:53:24.649798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:24.649845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:24.649903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-07T17:53:24.650393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:24.650433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:24.650497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:24.651182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.652570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:24.654962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:24.655085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-07T17:53:24.656382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-07T17:53:24.656423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-07T17:53:24.657802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-07T17:53:24.657926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-07T17:53:24.657964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:3046:5033] TestWaitNotification: OK eventTxId 175 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 10113, MsgBus: 28147 2026-01-07T17:53:13.196694Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679234293243533:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:13.196789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:13.405299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:13.427178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:13.427295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:13.474441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:13.502784Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:13.504047Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679234293243497:2081] 1767808393195034 != 1767808393195037 2026-01-07T17:53:13.576669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:13.576712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:13.576732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:13.576829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got 
bad distributable configuration 2026-01-07T17:53:13.642322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:13.982911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:14.036432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.132073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.228291Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:14.261498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:14.323051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.190515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679247178147260:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.190673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.191453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679247178147270:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.191510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.551108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.592273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.635902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.686084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.745600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.808250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.849491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.903694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.988054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679247178148144:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.988130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.988582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679247178148149:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.988643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679247178148150:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.988677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:16.993432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:17.009485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679247178148153:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:17.088069Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679251473115500:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:18.196835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679234293243533:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:18.196908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15029, MsgBus: 7549 2026-01-07T17:53:19.370146Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679261063113582:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:19.370215Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:19.437834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:19.451103Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:19.452268Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679261063113546:2081] 1767808399369269 != 1767808399369272 2026-01-07T17:53:19.497943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:19.498022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:19.500003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:19.538399Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:19.538429Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:19.538440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:19.538535Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:19.718247Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:19.881290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:19.889961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.935257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.052399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.105690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.376966Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:22.134089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679273948017304:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.134163Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.135563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679273948017314:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.135629Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.179001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.203589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.225722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.247234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.272161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.324484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.354208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.395901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.459408Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679273948018185:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.459487Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.459540Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679273948018190:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.459738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679273948018192:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.459787Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.462758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:22.473040Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679273948018193:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:22.550395Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679273948018245:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:24.370563Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679261063113582:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:24.370664Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl >> KqpBatchDelete::ManyPartitions_2 >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates >> KqpBatchDelete::Large_1 >> KqpBatchUpdate::Large_2 >> KqpBatchDelete::TableNotExists >> KqpBatchDelete::Returning >> KqpBatchDelete::ColumnTable >> KqpBatchUpdate::Returning >> KqpBatchDelete::ManyPartitions_1 >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchDelete::TableWithIndex >> KqpBatchUpdate::SimplePartitions >> KqpBatchUpdate::NotIdempotent >> KqpBatchUpdate::Large_3 >> KqpYql::CreateUseTable [GOOD] >> KqpYql::Discard-DiscardSelectIsOn [GOOD] >> KqpYql::FromBytes [GOOD] >> KqpBatchDelete::DeleteOn |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpYql::JsonCast [GOOD] >> KqpBatchUpdate::ColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 24128, MsgBus: 18788 2026-01-07T17:53:15.395306Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679242416785964:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.395387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.638158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.638266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.676793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.687893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-07T17:53:15.706907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.794545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:15.794592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:15.794608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:15.794730Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:15.885625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.265113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.272573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:16.330480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.413203Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.483820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.673562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.757225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.487966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255301689686:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.488083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.488398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255301689696:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.488470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.807079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.832377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.858942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.882574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.908486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.937131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.989685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.028452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.090254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259596657858:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.090343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259596657863:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.090386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.090749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259596657866:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.090813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.093851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.103816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679259596657865:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:19.204955Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259596657918:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.396216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679242416785964:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.396296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 24102, MsgBus: 28044 2026-01-07T17:53:21.580780Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679269675324732:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.580852Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:21.595346Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:21.680333Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.681590Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679269675324698:2081] 1767808401579864 != 1767808401579867 2026-01-07T17:53:21.693975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:21.694052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:21.718340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:21.761405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:21.772911Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:21.772968Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:21.772974Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:21.773055Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:22.150164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:22.165918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.217403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.364878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.424965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.588993Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:24.393180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679282560228454:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.393257Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.393473Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679282560228464:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.393511Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.457007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.482612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.507030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.535142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.562277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.592393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.621465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.664302Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.719639Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679282560229331:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.719711Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.719811Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679282560229336:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.719891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679282560229338:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.719946Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.722793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:24.731515Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679282560229340:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:24.787967Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679282560229391:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:26.037397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.251054Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808406280, txId: 281474976715675] shutting down |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpBatchDelete::SimpleOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard-DiscardSelectIsOn [GOOD] Test command err: Trying to start YDB, gRPC: 25752, MsgBus: 26455 2026-01-07T17:53:15.932482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679243240501305:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.932530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.179571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.183325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.183454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.244462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.257940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.415060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.419076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.419098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.419112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.419192Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:16.946474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.968013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.973800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:17.019358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.112644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.249788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.336410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.985355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679256125405031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.985500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.985872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679256125405041:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.985937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.279245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.312518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.340967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.373770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.402319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.434824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.465651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.532495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.596891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679260420373206:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.596960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.597036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679260420373211:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.597116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679260420373213:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.597156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.600667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.610034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679260420373215:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:19.693421Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679260420373266:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.932624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679243240501305:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.932685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27890, MsgBus: 25260 2026-01-07T17:53:21.977663Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679271057559065:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.977718Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:21.993691Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:22.054270Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:22.055659Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679271057559037:2081] 1767808401976732 != 1767808401976735 2026-01-07T17:53:22.093676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:22.093757Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:22.098853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:22.145922Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:22.145944Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:22.145951Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:22.146030Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:22.164591Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:22.502946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:22.511680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.568043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.697869Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.755020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.019922Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:24.717289Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679283942462786:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.717369Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.717569Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679283942462795:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.717612Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.771456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.794913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.822697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.848862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.875607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.904297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.933041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.971942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.033417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288237430963:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.033506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.033528Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288237430968:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.033663Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288237430970:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.033715Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.036161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:25.043501Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679288237430971:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:25.145520Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679288237431023:3759] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:26.469088Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592679292532398618:2533], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2026-01-07T17:53:26.469540Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZGMxNTM5ZWYtNDU1N2IzYzctNjg1N2I2YTctNjQyNjFhMjE=, ActorId: [2:7592679292532398611:2529], ActorState: ExecuteState, LegacyTraceId: 01kecsf4vhe5fvqq90fstjxab2, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 13 } message: "DISCARD not supported in YDB queries" end_position { row: 2 column: 13 } issue_code: 2008 severity: 1 } } tx_id# trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchDelete::SimplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 30714, MsgBus: 15633 2026-01-07T17:53:16.609788Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679247803262006:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.609863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.639777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:16.930099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.930223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.938718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.988291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:17.087244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679247803261879:2081] 1767808396568453 != 1767808396568456 2026-01-07T17:53:17.136599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:17.235038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:17.241524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:17.241550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:17.241557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:17.241636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:17.620023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.655985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.735195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.832957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.967558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.022446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.576348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679260688165643:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.576476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.576859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679260688165653:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.576946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.851630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.876311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.898555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.924427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.950007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.981841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.011633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.048720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.112789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679264983133819:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.112861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.113090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679264983133824:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.113128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679264983133825:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.113139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.115751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:20.124020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679264983133828:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:20.182150Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679264983133879:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.544060Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-07T17:53:21.573701Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:53:21.573899Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168 ... ZjAtMzFkMWYyNGM=, ActorId: [1:7592679269278101477:2534], ActorState: ExecuteState, LegacyTraceId: 01kecsf018a6sme4cy2vfg15w1, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 2026-01-07T17:53:21.615306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679247803262006:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.615391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16280, MsgBus: 28870 2026-01-07T17:53:22.441659Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679275991302951:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:22.441738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:22.454366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:22.533671Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:22.561772Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:22.561851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:22.565642Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:22.612306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:22.612328Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:22.612340Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:22.612421Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:22.619602Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:22.914618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:22.927822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.975054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.105000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.163276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.446134Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:24.885059Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679284581239354:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.885155Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.885390Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679284581239364:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.885438Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:24.940643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.968565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.993416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.017394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.045327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.073906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.100444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.138994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.218243Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288876207528:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.218314Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.218320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288876207533:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.218502Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288876207535:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.218550Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.221745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:25.230052Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679288876207536:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:25.331796Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679288876207588:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpScripting::ScanQueryDisable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 8795, MsgBus: 1968 2026-01-07T17:53:16.502875Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679248896307856:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.510973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.832505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.858146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.858254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.928176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.994402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.998954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679248896307810:2081] 1767808396476331 != 1767808396476334 2026-01-07T17:53:17.086539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:17.095490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:17.095534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:17.095542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:17.095626Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:17.523327Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.548836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.559061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:17.620624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.749303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.861370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.925431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.613531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261781211573:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.613621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.613883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261781211582:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.613926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.921252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.945874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.971888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.997835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.023559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.050775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.078399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.129138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.181433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266076179743:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.181500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.181526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266076179748:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.181623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266076179750:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.181649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.184226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:20.192335Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679266076179751:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:20.273857Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679266076179803:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.501549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679248896307856:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.501616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28361, MsgBus: 12919 2026-01-07T17:53:22.883969Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679275340357516:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:22.884038Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:22.905038Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:22.978776Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:22.978857Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:22.991749Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679275340357488:2081] 1767808402883109 != 1767808402883112 2026-01-07T17:53:23.006987Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:23.015838Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:23.066644Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:23.066667Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:23.066675Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:23.066751Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.228916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:23.466548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T17:53:23.477161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.522832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.658632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.737070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.893218Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:25.579036Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288225261246:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.579117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.579326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288225261255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.579373Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.629298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.677950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.703452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.726307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.749572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.775983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.800230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.835343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.893119Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288225262123:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.893213Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.893223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288225262128:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.893411Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288225262130:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.893484Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.895948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:25.905288Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679288225262131:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:25.963023Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679288225262185:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 1604, MsgBus: 17394 2026-01-07T17:53:16.023247Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679248389252944:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.023305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.293449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.293581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.361325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.370540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.370713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.481535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.481556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.481562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.481650Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:16.551442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.984406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.995222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:17.042369Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.066460Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.208253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.354378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.418375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.288445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261274156660:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.288575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.288864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261274156670:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.288902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.619397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.647642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.670891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.697110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.723112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.751878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.781454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.819841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.876107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261274157537:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.876185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.876261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261274157542:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.876376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679261274157543:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.876421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.879302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.888670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679261274157546:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:19.992299Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679261274157597:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.023631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679248389252944:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.024984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:21.935610Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808401954, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 27410, MsgBus: 18991 2026-01-07T17:53:22.818767Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679275412265095:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:22.818825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:22.830678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:22.895957Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:22.933564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:22.933659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:22.938444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:22.984846Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:22.984872Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:22.984879Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:22.984977Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.001820Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:23.333419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:23.341432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.397127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.509525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.616550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.824523Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:25.565510Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288297168801:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.565612Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.565855Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288297168810:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.565909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.615233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.638429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.661903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.685922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.708577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.733871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.759605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.792781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.875768Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288297169680:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.875845Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288297169685:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.875849Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.876032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679288297169687:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.876060Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.879087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:25.888537Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679288297169688:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:25.958293Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679288297169740:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:27.662540Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808407694, txId: 281474976710673] shutting down 2026-01-07T17:53:27.808555Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808407841, txId: 281474976710675] shutting down 2026-01-07T17:53:27.819059Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679275412265095:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.819124Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchUpdate::SimpleOnePartition |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 23566, MsgBus: 5765 2026-01-07T17:53:15.585856Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679242510014698:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:15.585905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.802135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:15.826360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.826505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.876901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.952140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.968349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.014862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.014890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-01-07T17:53:16.014898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.015011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:16.472842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:16.489507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:16.537773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.596474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:16.707229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.880785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.987940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.530115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255394918413:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.530227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.530535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255394918423:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.530618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.818802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.846746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.870137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.894105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.924171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.959282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.016521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.056836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.124228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259689886588:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.124303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.124325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259689886593:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.124458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259689886595:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.124492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.127775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.137817Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679259689886596:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:19.220559Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259689886648:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:20.586237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679242510014698:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:20.586335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:20.961336Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808400967, txId: 281474976710673] shutting down 2026-01-07T17:53:21.287341Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808401 ... e_cache_undelivered_message; 2026-01-07T17:53:22.844311Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:22.904916Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:22.950334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:22.950414Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:22.952492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:23.016153Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:23.016194Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:23.016202Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:23.016283Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.068487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:23.437467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:23.454498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.504447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.635415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.696126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.845346Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:25.635345Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525108838:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.635411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.636181Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525108857:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.636230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.637181Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525108876:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.637221Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.657292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.686093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.714848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.743865Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.773504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.801313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.826255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.866227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:25.938216Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525109720:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.938286Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.938292Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525109725:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.938479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679286525109727:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.938524Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:25.941902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:25.952517Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679286525109728:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:26.022983Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679290820077076:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:27.833066Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679273640205124:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.833141Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:27.882197Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808407918, txId: 281474976710673] shutting down 2026-01-07T17:53:28.305743Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808408331, txId: 281474976710675] shutting down |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 4593, MsgBus: 12361 2026-01-07T17:53:16.380913Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679246716518812:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.380987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.692825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.780720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.780840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.791665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.804181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.816069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679246716518771:2081] 1767808396379290 != 1767808396379293 2026-01-07T17:53:16.929596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.929619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.929629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.929690Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-07T17:53:16.948484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:17.365819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.378245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:17.406611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.419152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.520467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.659364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.724859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.518815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259601422551:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.518931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.519263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259601422561:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.519324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.780388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.807167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.835792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.860854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.885787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.913471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.983226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.022370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.104055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679263896390733:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.104140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.104175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679263896390738:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.104296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679263896390740:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.104332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.107380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:20.115473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679263896390741:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:20.201196Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679263896390793:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.383510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679246716518812:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.383575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:22.167747Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808402171, ... ]; 2026-01-07T17:53:22.961200Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:22.974978Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:23.039495Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:23.040954Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679273954082573:2081] 1767808402960693 != 1767808402960696 2026-01-07T17:53:23.061888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:23.061972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:23.071715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:23.121827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:23.121860Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:23.121867Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:23.121979Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.254352Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:23.474283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:23.490542Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.541032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.659539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.705364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.967812Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:26.018663Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291133953629:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.018751Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.019032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291133953639:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.019084Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.079423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.110514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.142623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.171273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.201827Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.235328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.272272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.314292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.398691Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291133954509:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.398786Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.398790Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291133954514:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.399053Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291133954516:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.399118Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.402352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:26.413420Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679291133954517:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:26.488700Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679291133954569:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:27.961408Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679273954082599:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.961488Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:28.154847Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808408191, txId: 281474976710673] shutting down 2026-01-07T17:53:28.385537Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808408408, txId: 281474976710675] shutting down 2026-01-07T17:53:28.621249Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808408639, txId: 281474976710677] shutting down |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchDelete::Large_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 9982, MsgBus: 11024 2026-01-07T17:53:16.286924Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679249649959660:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.288731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.529217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.578178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.578485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.657848Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.661198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.855253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:16.858111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.858136Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.858147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.858235Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:17.289185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.300876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.355003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.499544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.641349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.707704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.590583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679262534863295:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.590708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.591032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679262534863305:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.591095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.860909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.883286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.906551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.931893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.957979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.983677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.013760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.088903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:20.145899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266829831467:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.145975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.146042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266829831472:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.146096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679266829831474:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.146143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:20.148728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:20.156827Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679266829831476:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:20.251815Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679266829831527:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.280781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679249649959660:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.280844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:22.259230Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808402283, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 5418, MsgBus: 2971 2026-01-07T17:53:23.064287Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679278169087279:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:23.064518Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:23.082264Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:23.152453Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:23.153585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:23.153658Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:23.153983Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679278169087255:2081] 1767808403063347 != 1767808403063350 2026-01-07T17:53:23.184108Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:23.248006Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:23.248029Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:23.248035Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:23.248112Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.352776Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:23.622937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:23.628564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:23.645848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.699102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.829103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:23.896499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.070673Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:26.020042Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291053991004:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.020138Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.020382Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291053991013:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.020427Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.078621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.105556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.132350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.158624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.185396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.211360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.241975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.337998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.422856Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291053991883:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.422938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291053991888:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.422942Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.423165Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679291053991890:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.423220Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.426240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:26.436605Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679291053991891:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:26.490759Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679291053991943:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:28.068609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679278169087279:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:28.069603Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:28.567584Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808408597, txId: 281474976710673] shutting down |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchDelete::TableNotExists [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> KqpScripting::SecondaryIndexes [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 16098, MsgBus: 12356 2026-01-07T17:53:26.997035Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679289440320488:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.343715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.343834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.345054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.402253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.488304Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.646439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741762Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741831Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.016307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.411624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.417684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:30.252782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306620190534:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.252784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306620190525:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.252888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.253223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306620190540:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.253270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.257410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:30.267867Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679306620190539:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:53:30.373784Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679306620190592:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:30.696508Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679306620190609:2334], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiDeleteTable!
:2:35: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:30.697050Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWVlZDM4ZWYtNzk1Y2NlZTItOWYyMmM5OTItNmI5NDYyNzc=, ActorId: [1:7592679306620190521:2322], ActorState: ExecuteState, LegacyTraceId: 01kecsf8maek8zp2hnvebaqyes, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 35 } message: "At function: KiDeleteTable!" end_position { row: 2 column: 35 } severity: 1 issues { position { row: 2 column: 35 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 35 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:53:30.849266Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679306620190627:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:41: Error: At function: KiDeleteTable!
:3:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:30.849700Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWVlZDM4ZWYtNzk1Y2NlZTItOWYyMmM5OTItNmI5NDYyNzc=, ActorId: [1:7592679306620190521:2322], ActorState: ExecuteState, LegacyTraceId: 01kecsf92t9txhfxs066dkmq7r, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 41 } message: "At function: KiDeleteTable!" end_position { row: 3 column: 41 } severity: 1 issues { position { row: 3 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates >> KqpBatchDelete::Large_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:21.660947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:21.661042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:21.661149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:21.661198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:21.661229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:21.661291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:21.661375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:21.662273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-07T17:53:21.663515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:21.763795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:21.763856Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:21.778410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:21.778771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:21.778964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:21.785527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:21.785854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:21.786534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:21.786816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:21.796121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.798053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:21.817875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:21.817962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:21.818090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:21.818142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:21.818266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:21.819661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:21.995897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.996756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.996861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997617Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:21.997969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
57594046678944, cookie: 175 2026-01-07T17:53:31.167442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:31.167472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-07T17:53:31.167504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-07T17:53:31.167537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:53:31.168431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:31.168510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-07T17:53:31.168538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-07T17:53:31.168566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-07T17:53:31.168595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 6 2026-01-07T17:53:31.168657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-07T17:53:31.171068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:53:31.171160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:53:31.171193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:53:31.171219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:53:31.171584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-07T17:53:31.171630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-07T17:53:31.171707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 
2026-01-07T17:53:31.171734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:31.171765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-07T17:53:31.171790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:31.171818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-07T17:53:31.171889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-07T17:53:31.171922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-07T17:53:31.171949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-07T17:53:31.172094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 5 2026-01-07T17:53:31.173898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:31.174039Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 2026-01-07T17:53:31.174959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2026-01-07T17:53:31.175203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 4 Forgetting tablet 72075186233409619 2026-01-07T17:53:31.175890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6958: Transaction 281474976840657 reset current state at schemeshard 72075186233409618 because pipe to tablet 72075186233409619 disconnected 2026-01-07T17:53:31.175938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6987: Pipe attached message is found and resent into the new pipe, opId:281474976840657:4294967295, dst tableId: 72075186233409619, msg type: 269090816, msg cookie: 0:281474976840657, at schemeshardId: 72075186233409618 2026-01-07T17:53:31.176173Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 2026-01-07T17:53:31.176302Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 2026-01-07T17:53:31.177506Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:31.201482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2026-01-07T17:53:31.201783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-07T17:53:31.203402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2026-01-07T17:53:31.203637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 Forgetting tablet 72075186233409618 Forgetting tablet 72075186233409620 2026-01-07T17:53:31.204741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-07T17:53:31.206262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:31.206310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:31.206415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-07T17:53:31.206953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:31.206989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-07T17:53:31.207046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:31.209799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:74 2026-01-07T17:53:31.209847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2026-01-07T17:53:31.209942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:73 2026-01-07T17:53:31.209965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2026-01-07T17:53:31.210109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:75 2026-01-07T17:53:31.210145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2026-01-07T17:53:31.211971Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:31.212088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-07T17:53:31.213275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-07T17:53:31.213312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-07T17:53:31.214532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-07T17:53:31.214657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-07T17:53:31.214686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:8301:9328] TestWaitNotification: OK eventTxId 175 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KqpBatchUpdate::TableWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 12180, MsgBus: 27047 2026-01-07T17:53:16.164503Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679247086218044:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:16.164553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:16.201990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:16.467895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:16.473204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:16.473292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:16.485200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:16.603577Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679247086217817:2081] 1767808396135141 != 1767808396135144 2026-01-07T17:53:16.610566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:16.702626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:16.702647Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:16.702653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:16.702742Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:16.717354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:17.141582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:17.146401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:17.166242Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:17.219543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.352722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.515017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.579229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.201120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259971121589:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.201404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.201815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259971121599:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.201878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.578180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.607063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.630466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.654730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.678195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.706619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.735959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.781375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.855377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259971122465:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.855434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.855554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259971122470:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.855580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679259971122471:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.855637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:19.858745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:19.866563Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679259971122474:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:19.936072Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679259971122525:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:21.165598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679247086218044:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:21.165660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:23.795977Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:23.837821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:23.837907Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:23.858754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:23.885996Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:23.886019Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:23.886027Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:23.886108Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:23.986765Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:24.219178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:24.233351Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.278787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.389154Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.435980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:24.734578Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:26.440463Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679289702746892:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.440530Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.440818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679289702746902:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.440893Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.484833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.519903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.554364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.585112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.615719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.648094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.679771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.725502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:26.797155Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679289702747771:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.797236Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.797253Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679289702747776:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.797370Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679289702747778:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.797412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:26.800719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:26.809596Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679289702747779:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:26.905507Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679289702747831:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:28.540327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.601195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.663648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.732007Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679276817843179:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:28.732842Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchUpdate::UpdateOn >> KqpBatchUpdate::NotIdempotent [GOOD] >> KqpBatchDelete::Returning [GOOD] >> KqpBatchUpdate::Returning [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> KqpBatchUpdate::HasTxControl [GOOD] |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpBatchDelete::DeleteOn [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 16129, MsgBus: 61529 2026-01-07T17:53:26.997153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679292061039173:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.316721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.355133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.355248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.440566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.481143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.497012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.742933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.742966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.742972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.743034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.005538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.428117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.509976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.695703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.843099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.929679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.305771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309240910197:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.305887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.306308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309240910207:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.306390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.815331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.841792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.918009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.955570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.030671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.092267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.151171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.218652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313535878383:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.218735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.218797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313535878388:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.218971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313535878390:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.219017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.222845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.233892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679313535878392:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.294221Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679313535878443:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.001268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679292061039173:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.001328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.961228Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679317830846043:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2026-01-07T17:53:32.962394Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=M2Q4ODkyMTktNmIyYzZkZWUtZTI2MTczYTctZmExYTRkZjM=, ActorId: [1:7592679317830846034:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfb853b6x4ya7wgsme7bq, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 16843, MsgBus: 30434 2026-01-07T17:53:26.998256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679292569182627:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.998321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.377110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.384338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.388469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.462908Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.474795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.645186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741705Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.017211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.439444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.543951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.723205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.899711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.997315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.310091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309749053652:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309749053662:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.783146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.812103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.841281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.875677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.906633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.946194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.981800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.058561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.144717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314044021835:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.144801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314044021841:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314044021840:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.149307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.162060Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679314044021844:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.229741Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679314044021895:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.001118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679292569182627:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.002468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.974353Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679318338989493:2537], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-07T17:53:32.976524Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjM5ODgxYzItMmFlOTgxZDgtMzMwZDgyMTgtYzE5MDNkMjI=, ActorId: [1:7592679318338989484:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfb7yd4ff100700z89wha, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-07T17:53:33.005495Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679318338989497:2539], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-07T17:53:33.005850Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjM5ODgxYzItMmFlOTgxZDgtMzMwZDgyMTgtYzE5MDNkMjI=, ActorId: [1:7592679318338989484:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfb9saca4sn8wn34akmcs, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-07T17:53:33.031840Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679322633956801:2543], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2026-01-07T17:53:33.032289Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjM5ODgxYzItMmFlOTgxZDgtMzMwZDgyMTgtYzE5MDNkMjI=, ActorId: [1:7592679318338989484:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfbas8ze8y90wmq12nze7, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 51 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 51 } severity: 1 } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> KqpBatchUpdate::ManyPartitions_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 16214, MsgBus: 5668 2026-01-07T17:53:26.996970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679293598496622:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.289213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.345037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.345146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.352038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.457061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.525015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.009665Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.427190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.438651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.520388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.674260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.890476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.955547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.255506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310778367645:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.255630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.256006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310778367655:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.256065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.797143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.872756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.906513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.947083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.979280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.015917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.070305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.123183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.193907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679315073335826:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.193960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.193983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679315073335831:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.194166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679315073335833:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.194214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.197825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.206116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679315073335835:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.305656Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679315073335888:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.997189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679293598496622:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.997261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.961890Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679319368303484:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2026-01-07T17:53:32.963026Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjlhYWY1YS1iNmFlZDY2MC1mMDgzMGZiYi05M2Q1YTEzOQ==, ActorId: [1:7592679319368303475:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfb8e0svyd4rqjjr2hn57, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 2429, MsgBus: 31229 2026-01-07T17:53:26.996977Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679289408281000:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.046894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:27.400051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.400164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.405108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.463526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.523604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.670808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741919Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.013261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.441630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.447540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.507462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.675463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.841686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.918850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.290130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306588152029:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.290261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.290573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306588152039:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.290645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.783002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.822021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.857666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.891605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.925879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.970834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.011555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.061706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.164473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310883120207:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.164539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.164758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310883120212:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.164792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310883120213:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.164839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.168612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.180662Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679310883120216:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.257673Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679310883120267:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.997434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679289408281000:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.997496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:33.133615Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGEwNzAyMzAtZmFlM2I4YzAtY2ZmM2JkOGEtNGNmYTIzZTY=, ActorId: [1:7592679315178087856:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfb89c7bkfgrr54kh3dnd, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 61587, MsgBus: 4692 2026-01-07T17:53:27.003820Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679296850061611:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.003893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.337285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.399463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.399556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.522432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.524618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.527867Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679292555094213:2081] 1767808406994758 != 1767808406994761 2026-01-07T17:53:27.545461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:53:27.741593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.032832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.467864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.473305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.535696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.723919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.918400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.009253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.562173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309734965267:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.562286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.562846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309734965277:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.562900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.891532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.969017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.000486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.038292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.079994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.146635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.176363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.212822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.270692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314029933453:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.270756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.270761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314029933458:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.270964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314029933460:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.271007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.273809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.282584Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679314029933461:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.368638Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679314029933513:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.004383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679296850061611:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.004454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:33.202627Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679322619868377:2528], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T17:53:33.205173Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjBkYjBiMDYtYTMwOWY5NjgtYzhlNjQ3ZGUtODQwNjk4YmM=, ActorId: [1:7592679322619868368:2522], ActorState: ExecuteState, LegacyTraceId: 01kecsfbez1krenv7dmy69vm71, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 2796, MsgBus: 1559 2026-01-07T17:53:27.001928Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679295169291036:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.002179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.381100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.385370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.385493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.527418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.554274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.623127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.017098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.475910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.540116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.723190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.878518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.962455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.440127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679308054194692:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.440248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.440549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679308054194702:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.440613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.815218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.847985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.877825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.913216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.952569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.010764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.065305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.144364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312349162874:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.144426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312349162879:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312349162880:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.149399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.160041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:53:31.164322Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679312349162883:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.241570Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679312349162934:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.002094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679295169291036:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.003577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:33.143635Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679320939097797:2528], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T17:53:33.144034Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTgwODEwZjktNzAxMTZhNWUtM2NiMmIwOTktMTM2MTNhYWI=, ActorId: [1:7592679316644130490:2521], ActorState: ExecuteState, LegacyTraceId: 01kecsfbb9bm3xrz6gxhd5q4nb, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn [GOOD] Test command err: Trying to start YDB, gRPC: 63164, MsgBus: 24357 2026-01-07T17:53:27.949799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679296093009172:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.949866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:28.277969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:28.405726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:28.405813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:28.428917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:28.435139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:28.517472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:28.562659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:28.562681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:28.562695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:28.562801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.968239Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:29.097332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:29.106146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:29.164235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.296089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.450720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.522773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.153869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313272880193:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.153977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.156418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313272880203:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.156486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.373151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.405743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.433336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.462531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.494699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.531029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.569202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.613596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.691417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313272881069:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.691513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.691570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313272881074:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.691802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313272881076:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.691843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.695314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.706020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679313272881077:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.810191Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679313272881129:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.950596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679296093009172:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.950688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:33.461756Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679321862816027:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with ON 2026-01-07T17:53:33.462110Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDk3OGM1MzYtOWRmODkwNGUtNjhlZmYxMDItNzQxYzk5NGU=, ActorId: [1:7592679321862816017:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfbr372wcwd0chhn0vjbb, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 >> KqpBatchUpdate::ColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 25929, MsgBus: 18839 2026-01-07T17:53:26.996967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679289366184863:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.330571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.354981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.355056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.383861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.454665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.547292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.745568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.745599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.745605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.745677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.013083Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.411208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.417788Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:30.181994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306546054903:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.182124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.182361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306546054913:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.182536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.182590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306546054917:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.190754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:30.200920Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679306546054919:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:53:30.327200Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679306546054970:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:30.825406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:53:31.376391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:53:31.376398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:53:31.376645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:53:31.376707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:53:31.376967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:53:31.376992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:53:31.377116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:53:31.377142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:53:31.377258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:53:31.377264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:53:31.377375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:53:31.377397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:53:31.377482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:53:31.377487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:53:31.377627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:53:31.377647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:53:31.377820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:53:31.377825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:53:31.377950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:53:31.377959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:53:31.378071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7592679310841022777:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:53:31.378073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037948;self_id=[1:7592679310841022770:2347];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Restore ... pp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.120835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.120909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.120936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.125272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.125352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.125369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.128702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.128768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.128781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.131885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.131949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.131963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.134400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.134445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.134459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.139444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.139524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.139541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.140601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.140670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.140684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.147309Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.153830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.153871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.153882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.160520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.160580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.160595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.167959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.168012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.168027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.175384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.175452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.175469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.182916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.182988Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.183013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.190531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.190594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.190610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:33.888339Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTFmYTRjNzItNDEyOGMzNDAtODY1MTM2YWEtMzQ3Mzg1ZWM=, ActorId: [1:7592679306546054899:2322], ActorState: ExecuteState, LegacyTraceId: 01kecsfbw77mjyn930mxgn7x1a, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 16802, MsgBus: 21969 2026-01-07T17:53:18.563245Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679256084000155:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:18.563788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:18.791934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:18.792077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:18.814097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:18.841113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:18.884614Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679256084000122:2081] 1767808398561564 != 1767808398561567 2026-01-07T17:53:18.886019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:18.936019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:18.936055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:18.936067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:18.936169Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:19.121388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:19.313123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:19.374037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.482555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.582714Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:19.624735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:19.684715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.379086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679268968903881:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:21.379203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:21.380101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679268968903891:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:21.380194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:21.758062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.787479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.816438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.843406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.872143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.904708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.949771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:21.995399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:22.080330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679273263872062:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.080415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.080545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679273263872067:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.080602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679273263872069:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.080629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:22.083739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:22.092805Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679273263872071:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:22.172383Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679273263872122:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:23.567927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679256084000155:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:23.571974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:23.672772Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=1&id=Nzg1YjlkNzUtYTc1NzQyY2YtYmM3YWNiYjYtNmE1ZWFhZDE= } 2026-01-07T17:53:23.684511Z node 1 :KQP_SESSION ... _RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808407218, txId: 281474976710743] shutting down Trying to start YDB, gRPC: 64211, MsgBus: 31561 2026-01-07T17:53:28.287621Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679298605853228:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:28.287665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:28.321425Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:28.401029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:28.409003Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679298605853198:2081] 1767808408286359 != 1767808408286362 2026-01-07T17:53:28.416392Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:28.416475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:28.427913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:28.521960Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:28.521986Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:28.522001Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:28.522091Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.613983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:29.027361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:29.044843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:29.066028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.197699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.313944Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:29.355884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.426140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.808022Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679311490756946:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.808093Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.808495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679311490756956:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.808538Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.858751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.908536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.937249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.966011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.991723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.021689Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.054645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.113494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.193619Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679315785725125:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.193696Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.193773Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679315785725130:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.193892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679315785725132:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.193933Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.197487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:32.207873Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679315785725134:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:32.282445Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679315785725185:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:33.290037Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679298605853228:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:33.290132Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates >> KqpBatchDelete::HasTxControl >> KqpBatchDelete::UnknownColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 28137, MsgBus: 26490 2026-01-07T17:53:28.585376Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679300289256408:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:28.587311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:28.959745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:28.959853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:28.960458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:28.963539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:29.047051Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679300289256344:2081] 1767808408577382 != 1767808408577385 2026-01-07T17:53:29.047456Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:29.151056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:29.151082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:29.151089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:29.151170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:29.263712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:29.594540Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:29.623149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:29.639723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:31.780768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313174159128:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.780769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313174159123:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.780878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.781285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313174159138:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.781360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.784399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.794401Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679313174159137:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:53:31.895391Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679313174159190:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.152718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:53:32.688690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:53:32.688920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:53:32.689284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:53:32.689410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:53:32.689514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:53:32.689612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:53:32.689698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:53:32.689798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:53:32.689907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:53:32.690033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:53:32.690148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:53:32.690275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:53:32.690385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7592679317469127022:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:53:32.730148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:53:32.730274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:53:32.730469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:53:32.730592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:53:32.730696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:53:32.730782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:53:32.730879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:53:32.730971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7592679317469127025:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:53:32.731061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... 
pp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.480583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.480642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.480655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.488999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.498233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.506391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.506474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.506492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.507505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.507581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.507602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.514338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.514433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.514451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.515506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.515575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.515595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.520593Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.520684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.520698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.522129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.522205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.522224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.526416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.526466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.526478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.527418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.527461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.527474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.531793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.531847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.531859Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.534303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.534365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:34.534375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-07T17:53:35.174277Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NDEzNGRkMzctMTAxOTAwZmMtMmZiYWNlNmUtNDA3ODE4Nzg=, ActorId: [1:7592679313174159119:2322], ActorState: ExecuteState, LegacyTraceId: 01kecsfd3pa7b4z9ddgax531pb, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates >> KqpBatchDelete::ManyPartitions_3 |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists >> KqpBatchUpdate::Large_1 >> KqpBatchDelete::TableWithIndex [GOOD] >> KqpBatchDelete::MultiStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 14997, MsgBus: 31699 2026-01-07T17:53:31.365668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679315112545770:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.365738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:31.601034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:31.601924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:31.602008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:31.620485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:31.701091Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679315112545741:2081] 1767808411364400 != 1767808411364403 
2026-01-07T17:53:31.708210Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:31.771625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:31.771650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:31.771687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:31.771806Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:31.821970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:32.213075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:32.217827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:32.263789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.374982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:32.401792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.551419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.619092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.357390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679327997449516:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.357488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.357897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679327997449525:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.357967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.675446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.706886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.737279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.769671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.800853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.837789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.877258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.928468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.005026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679332292417692:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.005095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.005103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679332292417697:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.005237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679332292417699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.005267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.008582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:35.022066Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679332292417700:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:35.095866Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679332292417752:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:36.369080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679315112545770:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:36.369169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:36.827497Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679336587385320:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T17:53:36.828063Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjkwMGM5ODAtZTMxMjMzNTMtMzJmMjdjZi02OTI3ODEyMA==, ActorId: [1:7592679336587385311:2521], ActorState: ExecuteState, LegacyTraceId: 01kecsfeypcccd765p4th2c1hd, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5836, MsgBus: 7242 2026-01-07T17:53:27.005243Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679296816744976:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:27.006700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.253035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.347313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.347410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.432431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.469086Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.511466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.014885Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.415754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.544251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.732970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.912378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.012510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.309892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309701648685:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309701648695:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.310526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.811345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.842571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.880320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.911538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.951192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.998911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.089095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.180064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313996616862:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.180138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.180188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313996616867:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.180317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313996616869:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.180355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.183509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.193819Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679313996616871:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.293787Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679313996616924:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.004552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679296816744976:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.004628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.965607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.003913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.046629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.996138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn |95.8%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpBatchUpdate::MultiStatement >> KqpBatchUpdate::UpdateOn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 30089, MsgBus: 10259 2026-01-07T17:53:32.534382Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679318159594503:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.534534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:32.728637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:32.820697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:32.820800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:32.822471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:32.855417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:32.856220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679318159594477:2081] 1767808412532208 != 1767808412532211 2026-01-07T17:53:32.893383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:32.950264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:32.950286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:32.950304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:32.950421Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:33.360191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:33.429239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.547659Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:33.608680Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.760671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.836691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.687842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679331044498233:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.687976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.688310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679331044498243:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.688396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.034480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.072549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.107416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.140342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.173802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.209455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.268620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.308874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.397349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679335339466406:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.397462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.397822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679335339466411:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.397860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679335339466412:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.398185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.402609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:36.415021Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679335339466415:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:36.503504Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679335339466466:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:37.534558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679318159594503:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.534645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 30162, MsgBus: 17249 2026-01-07T17:53:33.049382Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679321982710661:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:33.050768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:33.103474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:33.332834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:33.351384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:33.351470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:33.354058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:33.429102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:33.433072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679321982710567:2081] 1767808413045312 != 1767808413045315 2026-01-07T17:53:33.493438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:33.493476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:33.493488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:33.493606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:33.548733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:33.960487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:34.050115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.059254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:34.212291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.370770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.435809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.289991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334867614332:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.290149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.294090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334867614342:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.294229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.621798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.656577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.687791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.728257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.757225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.795182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.849525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.891315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.972329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334867615219:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.972415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.972708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334867615224:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.972777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334867615225:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.972841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.977207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:36.988239Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679334867615228:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:37.083335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679339162582577:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:38.049044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679321982710661:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.049120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 9467, MsgBus: 9236 2026-01-07T17:53:33.782300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679322406219967:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:33.783385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:34.126567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:34.131566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:34.131673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:34.172684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:34.223879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:34.226257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679322406219914:2081] 1767808413779524 != 1767808413779527 2026-01-07T17:53:34.314343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:34.314370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:34.314376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:34.314461Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:34.352430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:53:34.765430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:34.787020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:34.817982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.932062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.062829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.115069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.066905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679339586090971:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.066995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.067314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679339586090981:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.067376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.371251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.407464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.438704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.468630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.496187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.530066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.560350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.631708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.714643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679339586091853:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.714728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.717087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679339586091858:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.717170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679339586091859:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.717217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.722480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:37.742490Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679339586091862:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:37.843709Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679339586091915:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:38.784870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679322406219967:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.784959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:39.482011Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679348176026810:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with ON 2026-01-07T17:53:39.483679Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDg0NDE0OGMtNTRmMzYwODUtZWU2NGNjOTktMzU5MTZhMmI=, ActorId: [1:7592679348176026801:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfhma72k5ncgnxbqrrs46, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 6830, MsgBus: 13147 2026-01-07T17:53:38.035379Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679341907200427:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.035432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:38.397058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:38.413818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:38.413940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:38.485897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:38.559991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679341907200300:2081] 1767808418021278 != 1767808418021281 2026-01-07T17:53:38.567963Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:38.673682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.673709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.673715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.673810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.681703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:39.047082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:39.160656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:39.166535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:41.368988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354792103081:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.369079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354792103092:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.369163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.369886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354792103096:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.369960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.373827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:41.387541Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679354792103095:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:53:41.481682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679354792103148:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:41.819666Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679354792103157:2334], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At function: KiUpdateTable!
:3:34: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:41.820153Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDMwMzhlYzYtMjMxNDJkZTctNzVmNDhmYmUtMjBhYzIyYmM=, ActorId: [1:7592679354792103054:2321], ActorState: ExecuteState, LegacyTraceId: 01kecsfkfp7e6b3a86jze40ssz, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 3 column: 34 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 34 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:53:41.876763Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679354792103183:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:4:41: Error: At function: KiUpdateTable!
:4:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:41.879098Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDMwMzhlYzYtMjMxNDJkZTctNzVmNDhmYmUtMjBhYzIyYmM=, ActorId: [1:7592679354792103054:2321], ActorState: ExecuteState, LegacyTraceId: 01kecsfky68e1ye1c42y1980b1, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 41 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 4 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::UnknownColumn [GOOD] >> KqpBatchDelete::HasTxControl [GOOD] >> KqpBatchUpdate::TableWithIndex [GOOD] >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 11180, MsgBus: 13387 2026-01-07T17:53:37.491264Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679338076083968:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.491539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:37.528556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:37.827224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:37.827359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:37.830705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:37.863961Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:37.864244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:37.866581Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679338076083845:2081] 1767808417485173 != 1767808417485176 2026-01-07T17:53:38.000336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.000360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.000370Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.000467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.131373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:38.502876Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:38.511686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:38.520008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:38.602253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.775395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.947717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.029320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.842724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679350960987616:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.842846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.843249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679350960987626:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.843305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.139323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.170818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.206565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.234384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.262054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.294273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.329132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.382865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.465771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679355255955800:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.465856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.466171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679355255955805:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.466235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679355255955806:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.466372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.470226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:41.479731Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679355255955809:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:41.563666Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679355255955862:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:42.490459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679338076083968:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.490530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:43.037888Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679359550923460:2537], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T17:53:43.038353Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTRiZmU4ZjUtMTBhNzA3NWEtMzg3NWNiNS00M2IzYTdlNg==, ActorId: [1:7592679359550923451:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfn2e2tcj7pnrrqa4vdm0, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 19222, MsgBus: 3229 2026-01-07T17:53:37.605307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679337268422034:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.605429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:37.825900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:37.907796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:37.907900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:37.913750Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679337268421821:2081] 1767808417588925 != 1767808417588928 2026-01-07T17:53:37.919290Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:37.926530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:38.013246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.013266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.013275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.013348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.103337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:38.488058Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:38.493882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:38.541849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.606695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:38.714183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.886285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.954691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.867057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679350153325576:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.867173Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.867597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679350153325586:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.867639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.225698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.258441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.290940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.327323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.365606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.407676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.485381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.535637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.609281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354448293757:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.609363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.610683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354448293763:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.610732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.610733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354448293762:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.615867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:41.625404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679354448293766:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:41.708040Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679354448293819:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:42.603689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679337268422034:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.603775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:43.191440Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679363038228716:2537], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At lambda, At function: Coalesce
:3:37: Error: At function: ==
:3:23: Error: At function: Member
:3:23: Error: Member not found: UnknownColumn 2026-01-07T17:53:43.191883Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODE5NzY1OWItYTczMDMzYmMtOTczMmIxOTMtODFlZGM1ZTE=, ActorId: [1:7592679363038228707:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfn7d4dm6fm4gfg8a34km, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 31 } message: "At lambda, At function: Coalesce" end_position { row: 2 column: 31 } severity: 1 issues { position { row: 3 column: 37 } message: "At function: ==" end_position { row: 3 column: 37 } severity: 1 issues { position { row: 3 column: 23 } message: "At function: Member" end_position { row: 3 column: 23 } severity: 1 issues { position { row: 3 column: 23 } message: "Member not found: UnknownColumn" end_position { row: 3 column: 23 } severity: 1 } } } } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 1291, MsgBus: 1182 2026-01-07T17:53:37.556320Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679340160780806:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.556372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:37.777261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:37.781749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:37.781861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:37.878958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:37.892955Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679340160780625:2081] 1767808417544271 != 1767808417544274 2026-01-07T17:53:37.905156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:37.933056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:38.091960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.091986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.091993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.092100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.561550Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:38.628596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:38.636957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:38.714297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.843698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.012981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.081061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.936269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353045684381:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.936437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.936896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353045684391:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.936959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.249614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.285949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.318037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.352136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.384761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.432687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.471293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.518139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.608519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679357340652559:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.608590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.608682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679357340652564:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.609032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679357340652566:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.609111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.612147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:41.627717Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679357340652567:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:41.760526Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679357340652619:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:42.555078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679340160780806:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.556554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:43.364177Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZmQzNDc1YWQtZWY3NjQ0My0zYWUwODU0ZC01YWNhNzNjNw==, ActorId: [1:7592679365930587507:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfnb0ex5fbnqmacqkxyqn, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 22693, MsgBus: 17925 2026-01-07T17:53:37.898501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679336770358135:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.899831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:38.168512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:38.203807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:38.203930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:38.253968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:38.275414Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:38.417340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.417358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.417363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.417431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.452233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:38.903126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:38.908157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:38.977249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.150012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.336041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.401820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.181958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353950229057:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.182040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.182398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353950229067:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.182471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.510409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.542383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.578724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.609236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.646771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.681662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.722587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.799642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.884437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353950229938:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.884517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.884771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353950229943:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.884816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679353950229944:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.884921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.888340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:41.900900Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679353950229947:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:41.962613Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679353950230000:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:42.897423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679336770358135:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.897512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:43.492274Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679362540164862:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T17:53:43.492734Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Mzg0OWUzNzAtNjQ5ODYxY2YtODUxYmY3MWItNmUwZTEyOWM=, ActorId: [1:7592679362540164853:2521], ActorState: ExecuteState, LegacyTraceId: 01kecsfngm8ahqz7dpzc96rp26, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 14933, MsgBus: 15274 2026-01-07T17:53:33.601933Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679320044063020:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:33.601985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:33.837729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:33.848757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:33.848861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:33.909567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:33.952538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:33.957056Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679320044062993:2081] 1767808413599370 != 1767808413599373 2026-01-07T17:53:34.020732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:34.020759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:34.020765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:34.020839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:34.033200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:34.390638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:34.431497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.570377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.630124Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:34.713130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.780456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.658699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679332928966757:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.658850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.659369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679332928966767:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.659446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.966090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.040903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.075746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.105985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.140608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.174311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.243283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.287228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.356891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679337223934941:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.356989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.357202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679337223934946:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.357202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679337223934947:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.357253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.361072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:37.371991Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679337223934950:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:37.432559Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679337223935001:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:38.601805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679320044063020:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.601884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:38.948030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.986211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.049921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.845850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardServerLess::StorageBilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 15070, MsgBus: 11620 2026-01-07T17:53:38.934048Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679343381153352:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.934089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:39.237576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:39.271085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:39.271198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:39.316359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679343381153318:2081] 1767808418927286 != 1767808418927289 2026-01-07T17:53:39.332874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:39.334228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:39.453276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:39.455715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:39.455744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:39.455753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:39.455824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:39.890843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:39.901628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:39.956510Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:39.968146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.086076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.233243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.299068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.196523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679360561024377:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.196652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.197001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679360561024387:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.197068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.453478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.487365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.516698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.547837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.583812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.619244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.653994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.698998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.774688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679360561025258:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.774786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.774945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679360561025263:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.775027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679360561025264:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.775084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.778869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:42.789592Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679360561025267:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:42.868255Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679360561025318:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:43.934275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679343381153352:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:43.934366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:44.500240Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679369150960217:2537], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:44.500559Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljMmE2N2QtY2FiZThmMTItMmRlYjIwZGQtZDg1OGQ0OTM=, ActorId: [1:7592679369150960208:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfpf9djbj9fhtgkjjq7kq, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:44.517881Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679369150960221:2539], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:44.518184Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljMmE2N2QtY2FiZThmMTItMmRlYjIwZGQtZDg1OGQ0OTM=, ActorId: [1:7592679369150960208:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfpht3jvda9t6ycbg3skc, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:44.534485Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679369150960225:2541], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:44.534931Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljMmE2N2QtY2FiZThmMTItMmRlYjIwZGQtZDg1OGQ0OTM=, ActorId: [1:7592679369150960208:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfpjbb31fb7xr4j95n109, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:44.556571Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679369150960229:2543], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:44.556986Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljMmE2N2QtY2FiZThmMTItMmRlYjIwZGQtZDg1OGQ0OTM=, ActorId: [1:7592679369150960208:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfpjv2aa7gxz800j4nbqx, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:44.573579Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679369150960233:2545], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:44.573849Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljMmE2N2QtY2FiZThmMTItMmRlYjIwZGQtZDg1OGQ0OTM=, ActorId: [1:7592679369150960208:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfpkj8v7tmvrqey2ebth9, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 3 column: 29 } severity: 1 } tx_id# trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] >> KqpBatchUpdate::UnknownColumn [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 22303, MsgBus: 10317 2026-01-07T17:53:40.311357Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679353023925270:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.311587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:40.566264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:40.566399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:40.599029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:40.644585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:40.653304Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679353023925224:2081] 1767808420304851 != 1767808420304854 2026-01-07T17:53:40.657313Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:40.724151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:40.724177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:40.724184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:40.724271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:40.886453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:53:41.156293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:41.214832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.317120Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:41.374424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.520790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.587380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.481149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679365908828989:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.481265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.481544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679365908828999:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.481601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.877126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.905930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.933342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.962754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.993364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.026607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.059186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.098023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.175501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679370203797165:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.175580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.176235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679370203797169:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.176290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679370203797171:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.176323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.179515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:44.189511Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679370203797174:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:44.251031Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679370203797225:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:45.311239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679353023925270:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:45.311304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:45.726129Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679374498764826:2537], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At lambda, At function: Coalesce
:4:41: Error: At function: ==
:4:27: Error: At function: Member
:4:27: Error: Member not found: UnknownColumn 2026-01-07T17:53:45.726629Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Zjc3N2E4ZTctYWNkMzI1OC1kYjJmMWY3ZC05MWU4NzQ2NA==, ActorId: [1:7592679374498764817:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqphbxhtrrm42c4xbqby, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At lambda, At function: Coalesce" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 4 column: 41 } message: "At function: ==" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 27 } message: "At function: Member" end_position { row: 4 column: 27 } severity: 1 issues { position { row: 4 column: 27 } message: "Member not found: UnknownColumn" end_position { row: 4 column: 27 } severity: 1 } } } } } tx_id# trace_id# 2026-01-07T17:53:45.769741Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679374498764835:2541], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:4:43: Error: At function: KiUpdateTable!
:4:43: Error: Column 'UnknownColumn' does not exist in table '/Root/Test'., code: 2017 2026-01-07T17:53:45.770136Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Zjc3N2E4ZTctYWNkMzI1OC1kYjJmMWY3ZC05MWU4NzQ2NA==, ActorId: [1:7592679374498764817:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqr62640xrgyyastcy5w, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 43 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 43 } severity: 1 issues { position { row: 4 column: 43 } message: "Column \'UnknownColumn\' does not exist in table \'/Root/Test\'." end_position { row: 4 column: 43 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 28850, MsgBus: 28651 2026-01-07T17:53:40.451739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679352515985487:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.452061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:40.733614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:40.733704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:40.735523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:40.740672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:40.808097Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:40.869363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:40.869387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:40.869395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:40.869474Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:40.914113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:41.270292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:41.355557Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.459667Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:41.505973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.660946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.740100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.544533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679365400889141:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.544618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.544846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679365400889151:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.544910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:43.839482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.871323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.898354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.927200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.955686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.987853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.045466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.086698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.156277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679369695857318:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.156357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.156543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679369695857323:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.156609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679369695857324:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.156690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:44.159686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:44.169850Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679369695857327:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:44.245427Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679369695857378:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:45.441865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679352515985487:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:45.441926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:45.673600Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679373990824979:2537], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:45.673976Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhZWIzN2MtMTQ5ODRhMWMtMmMzMmY2My1jZTUyYjYwNQ==, ActorId: [1:7592679373990824970:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqkefxaax6hmynggyq8h, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 5 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 5 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:45.693554Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679373990824983:2539], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:45.695367Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhZWIzN2MtMTQ5ODRhMWMtMmMzMmY2My1jZTUyYjYwNQ==, ActorId: [1:7592679373990824970:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqpg6pvb37vsb1s05xqb, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:45.712266Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679373990824987:2541], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:45.712560Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhZWIzN2MtMTQ5ODRhMWMtMmMzMmY2My1jZTUyYjYwNQ==, ActorId: [1:7592679373990824970:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqq454qd1df2trchrkx6, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:45.728905Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679373990824991:2543], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:45.730315Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhZWIzN2MtMTQ5ODRhMWMtMmMzMmY2My1jZTUyYjYwNQ==, ActorId: [1:7592679373990824970:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqqp7g3gj8r6f83vxvsd, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-07T17:53:45.778420Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679373990824996:2545], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-07T17:53:45.780089Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhZWIzN2MtMTQ5ODRhMWMtMmMzMmY2My1jZTUyYjYwNQ==, ActorId: [1:7592679373990824970:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsfqs68spx3y7zeqym2018, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardServerLess::StorageBillingLabels >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:46.804175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:46.804278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:46.804380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:46.804419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:46.804450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:46.804508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:46.805513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:46.807881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2026-01-07T17:53:46.905150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:46.905208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:46.920121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:46.920488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:46.920661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:46.935325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:46.935679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:46.944951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:46.951547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:46.964511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.964748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:46.973055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:46.973345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:46.973459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:46.974598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:46.985912Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:53:47.119819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:53:47.121681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.123318Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:53:47.123437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:53:47.125049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:53:47.125156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:47.133713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.140506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:53:47.140845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.141023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:53:47.141077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:53:47.141116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:53:47.143987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.144062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:53:47.144111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:53:47.147246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.147298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.147365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.147423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:53:47.153639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 
0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:53:47.156892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:53:47.158293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:53:47.159583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.159742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:47.159791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.160132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:53:47.160187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.160363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:53:47.163776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:47.167092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:47.167164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
:HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 2026-01-07T17:53:48.087333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:53:48.088094Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 Forgetting tablet 72075186234409551 2026-01-07T17:53:48.089601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-07T17:53:48.089847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409550 2026-01-07T17:53:48.090924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:53:48.091132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-07T17:53:48.091989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:53:48.092303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:48.092366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:53:48.092474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-07T17:53:48.093391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:48.093447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:53:48.093532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:48.096573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:53:48.096629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409549 2026-01-07T17:53:48.096752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-07T17:53:48.096777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2026-01-07T17:53:48.097201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:53:48.097269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2026-01-07T17:53:48.098586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:48.098668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:53:48.098920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:53:48.098956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:53:48.099369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:53:48.099450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:53:48.099489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:948:2807] TestWaitNotification: OK eventTxId 106 2026-01-07T17:53:48.099936Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.100158Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 207us result status StatusPathDoesNotExist 2026-01-07T17:53:48.100295Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-07T17:53:48.100830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.100993Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 134us result status StatusPathDoesNotExist 2026-01-07T17:53:48.101105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:53:48.101503Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.101711Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 197us result status StatusSuccess 2026-01-07T17:53:48.102062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2026-01-07T17:53:48.103926Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2026-01-07T17:53:48.104028Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2026-01-07T17:53:48.104093Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2026-01-07T17:53:48.104157Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:46.805640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:46.805715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.805767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:46.805802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:46.805834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:46.805860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:46.805906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.805978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:46.806751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:46.807853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:46.901458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:46.901520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:46.919367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:46.919654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:46.920111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:46.933667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:46.933915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:46.944904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:46.951536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:46.962861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.964767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:46.973054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:46.973370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:46.973470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:46.974585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.150681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-07T17:53:47.151606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.151727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.151828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.151904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.151989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.152717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T17:53:48.484756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:53:48.484831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-07T17:53:48.485000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:53:48.485140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1054:2984], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:53:48.488224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:48.488279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:53:48.488454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:48.488500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:53:48.488593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.488655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-07T17:53:48.488696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-07T17:53:48.489705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:53:48.489801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:53:48.489841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:53:48.489872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:53:48.489909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-07T17:53:48.489985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:53:48.492678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.492738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:53:48.492873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:53:48.492912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.492978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:53:48.493037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.493085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:53:48.493128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.493183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:53:48.493223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:53:48.493406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:53:48.494465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:53:48.495869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:53:48.495921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:53:48.496417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:53:48.496529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:53:48.496566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1262:3142] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-07T17:53:48.499958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:53:48.500118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2026-01-07T17:53:48.500170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2026-01-07T17:53:48.500395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-07T17:53:48.500483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-07T17:53:48.502870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.503090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2026-01-07T17:53:48.506257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:53:48.506433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2026-01-07T17:53:48.506471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2026-01-07T17:53:48.506620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: 
can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-07T17:53:48.506690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-07T17:53:48.509281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.509520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:47.533544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:47.533628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:47.533681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:47.533740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:47.533779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:47.533804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:47.533866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:47.533957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:47.534822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:47.535148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:47.625270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:47.625325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:47.645784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:47.646157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:47.646382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:47.653370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:47.653730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:47.654446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.654708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:47.657960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:47.658175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:47.659285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:47.659344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:47.659472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:47.659515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:47.659613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:47.659812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.666328Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:53:47.813949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:53:47.814195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.814367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:53:47.814409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:53:47.814691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:53:47.814772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:47.818187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.818394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:53:47.818594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.818684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:53:47.818720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:53:47.818745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:53:47.821822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.821894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:53:47.821941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:53:47.824007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.824068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.824122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.824171Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:53:47.827191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:53:47.828625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:53:47.828789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:53:47.829781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.829872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:47.829910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.830200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:53:47.830258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:53:47.830408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:53:47.830503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:47.833962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:47.834017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-07T17:53:48.605462Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-07T17:53:48.606429Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-07T17:53:48.607239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-07T17:53:48.607507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409548 2026-01-07T17:53:48.608675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-07T17:53:48.608889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409547 2026-01-07T17:53:48.609955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-07T17:53:48.610838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:48.610900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:53:48.611048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-07T17:53:48.611775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:53:48.611823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-07T17:53:48.611889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-07T17:53:48.614818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-07T17:53:48.614895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409546 2026-01-07T17:53:48.615002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-07T17:53:48.615029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2026-01-07T17:53:48.615481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-07T17:53:48.615535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2026-01-07T17:53:48.617128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:53:48.617255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:53:48.617571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:53:48.617617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:53:48.618092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-07T17:53:48.618199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:53:48.618236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:934:2792] TestWaitNotification: OK eventTxId 106 2026-01-07T17:53:48.618822Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.619032Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 227us result status StatusPathDoesNotExist 2026-01-07T17:53:48.619203Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-07T17:53:48.619702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.619920Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 204us result status StatusPathDoesNotExist 2026-01-07T17:53:48.620048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:53:48.620544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.620708Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 167us result status StatusSuccess 2026-01-07T17:53:48.621169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2026-01-07T17:53:48.621799Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2026-01-07T17:53:48.621870Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2026-01-07T17:53:48.621909Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2026-01-07T17:53:48.621980Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:46.804171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:46.804269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:46.804363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:46.804394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:46.804420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:46.804471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:46.805372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:46.807895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:46.901494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:46.901554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:46.920658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:46.921009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:46.921191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:46.931430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:46.933152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:46.944939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:46.951725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:46.961769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.964777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:46.973062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:46.973305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:46.973488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:46.974571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.144252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.145957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.146021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.146075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pathId: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:53:48.567700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6434: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2026-01-07T17:53:48.567747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-01-07T17:53:48.567875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-01-07T17:53:48.567924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:795: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2026-01-07T17:53:48.567965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 138 -> 240 2026-01-07T17:53:48.572526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.572740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.572786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-07T17:53:48.572908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:53:48.572967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:53:48.573009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-07T17:53:48.573044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:53:48.573089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-07T17:53:48.573146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-07T17:53:48.573194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-07T17:53:48.573226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-07T17:53:48.573304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-07T17:53:48.579076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-07T17:53:48.579130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-07T17:53:48.579650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 
2026-01-07T17:53:48.579753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-07T17:53:48.579797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1348:3228] TestWaitNotification: OK eventTxId 106 2026-01-07T17:53:48.580526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.580742Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 241us result status StatusSuccess 2026-01-07T17:53:48.581224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.581908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2026-01-07T17:53:48.582109Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 187us result status StatusSuccess 2026-01-07T17:53:48.582526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409549 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976725657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2026-01-07T17:53:48.583208Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:53:48.583395Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 176us result status StatusSuccess 2026-01-07T17:53:48.583734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 
TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.584431Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:39 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:47.529325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:47.529419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:47.529478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:47.529524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:47.529562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:47.529597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:47.529678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:47.529763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-07T17:53:47.530529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:47.530809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:47.615904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:47.615962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:47.632552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:47.632906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:47.633161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:47.640916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:47.641378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:47.642193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:47.642462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:47.646855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:47.647135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:47.648546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:47.648618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:47.648767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:47.648823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:47.648965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:47.649274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.790754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.794987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.795478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
86234409549, txId: 281474976725658 2026-01-07T17:53:48.970827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186234409549, txId: 281474976725658, pathId: [OwnerId: 72075186234409549, LocalPathId: 1], version: 6 2026-01-07T17:53:48.970860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186234409549, LocalPathId: 1] was 5 2026-01-07T17:53:48.970919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725658, ready parts: 0/1, is published: true 2026-01-07T17:53:48.974568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2026-01-07T17:53:48.974722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-07T17:53:48.974840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:53:48.974977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-07T17:53:48.975119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725658 2026-01-07T17:53:48.975221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:48.975333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.975379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-07T17:53:48.975639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-07T17:53:48.975689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-07T17:53:48.975825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:53:48.975922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1060:2990], 
EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-07T17:53:48.978090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:48.978129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:53:48.978260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:48.978293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-07T17:53:48.978559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.978603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-07T17:53:48.978642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-07T17:53:48.979443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:53:48.979534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-07T17:53:48.979574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-07T17:53:48.979625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-07T17:53:48.979664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-07T17:53:48.979730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-07T17:53:48.982772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-07T17:53:48.982831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-07T17:53:48.982934Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:53:48.982965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.983009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-07T17:53:48.983044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.983086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-07T17:53:48.983120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-07T17:53:48.983154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-07T17:53:48.983183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-07T17:53:48.983341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:53:48.983803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-07T17:53:48.985592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-07T17:53:48.985631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-07T17:53:48.986030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-07T17:53:48.986124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-07T17:53:48.986163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1265:3145] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-07T17:53:48.989317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:53:48.989548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2026-01-07T17:53:48.989602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2026-01-07T17:53:48.989746Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-07T17:53:48.989807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-07T17:53:48.992785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:53:48.993063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:46.804209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:46.804327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:46.804430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:46.804469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:46.804501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:46.804582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:46.805590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:46.807927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:46.904029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:46.904095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:46.921896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:46.922251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:46.922430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:46.944718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:46.945156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:46.945945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:46.951545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:46.961799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.965824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:46.973075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:46.973332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:46.973459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:46.974594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.133314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.135836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.136884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.137013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.137099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_class' 2026-01-07T17:53:48.911333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2026-01-07T17:53:48.911391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/partition_stats' 2026-01-07T17:53:48.911431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-07T17:53:48.911468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-07T17:53:48.911515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-07T17:53:48.911554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_request_units_one_hour' 2026-01-07T17:53:48.911589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-07T17:53:48.911632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_by_tli_one_hour' 2026-01-07T17:53:48.914890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-07T17:53:48.914970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-07T17:53:48.915020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-07T17:53:48.915065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-07T17:53:48.915179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_hour' 2026-01-07T17:53:48.915238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_duration_one_minute' 2026-01-07T17:53:48.915273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-07T17:53:48.915307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-07T17:53:48.915344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_tables' 2026-01-07T17:53:48.915444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-07T17:53:48.915506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-07T17:53:48.915564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_users' 2026-01-07T17:53:48.922942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-07T17:53:48.923080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-07T17:53:48.923137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-07T17:53:48.923210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_group_members' 2026-01-07T17:53:48.923305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-07T17:53:48.923392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-07T17:53:48.923443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-07T17:53:48.923499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/streaming_queries' 2026-01-07T17:53:48.923564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/resource_pools' 2026-01-07T17:53:48.923614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-01-07T17:53:48.923662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-07T17:53:48.926110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-01-07T17:53:48.926231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_permissions' 2026-01-07T17:53:48.926290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-07T17:53:48.926380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_minute' 2026-01-07T17:53:48.926414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:864:2817] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-07T17:53:48.931470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-07T17:53:48.931705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2026-01-07T17:53:48.931975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2026-01-07T17:53:48.935643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:53:48.935884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2026-01-07T17:53:48.939368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-07T17:53:48.939598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2026-01-07T17:53:48.939861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2026-01-07T17:53:48.946059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:53:48.946462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> KqpBatchDelete::Large_1 [GOOD] >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpBatchDelete::Large_2 [GOOD] >> KqpBatchUpdate::Large_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 25283, MsgBus: 6667 2026-01-07T17:53:26.998917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679293273831803:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.999055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.325953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.343255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.343369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-07T17:53:27.365737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.448288Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679293273831770:2081] 1767808406993339 != 1767808406993342 2026-01-07T17:53:27.457606Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.596658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.742268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.742296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.742302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.742367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.012076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.490624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.498047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.566721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.716274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.892487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.967921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.288832Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310453702819:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.288989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.289339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310453702829:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.289389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.813962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.856638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.888555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.918140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.953765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.989887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.051865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.143907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314748670996:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.143986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.144592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314748671001:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.144631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679314748671002:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.144658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.148821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.164371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679314748671005:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.223988Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679314748671056:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:32.001067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679293273831803:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.001131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.936746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 46131:7762515]; 2026-01-07T17:53:44.618834Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:44.628912Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:44.715326Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:44.720902Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592679366705194817:2081] 1767808424617721 != 1767808424617724 2026-01-07T17:53:44.741880Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:44.760483Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:44.760584Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:44.763427Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:44.802531Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:44.802552Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:44.802571Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:44.802667Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:45.037691Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:45.301924Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:45.313108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.372046Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.532346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.605538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.649965Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:48.075934Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679383885065873:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.076044Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.076420Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679383885065882:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.076536Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.162073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.241882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.290483Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.326003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.364243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.403986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.442031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.488185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:48.563381Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679383885066757:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.563470Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.563541Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679383885066762:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.563865Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679383885066764:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.563931Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:48.567174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:48.579903Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679383885066765:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:48.637442Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679383885066817:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:49.618984Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679366705194844:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:49.619058Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:50.237274Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 19991, MsgBus: 23177 2026-01-07T17:53:33.336501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679321522603881:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:33.336862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:33.553027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:33.591032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:33.591155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:33.633322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:33.684276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:33.740833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:33.740858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:33.740877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:33.741000Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:33.844754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-07T17:53:34.195424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:34.239596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.342965Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:34.378411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.543498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.612991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.304742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334407507520:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.316698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.317217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679334407507530:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.317309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:36.632630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.669506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.708328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.749281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.778291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.817043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.883388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.927000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.007259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679338702475696:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.007337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.007764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679338702475701:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.007822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679338702475702:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.008172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:37.011651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:37.021463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679338702475705:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:37.102808Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679338702475756:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:38.335841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679321522603881:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.335927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:38.547940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 5083, MsgBus: 11049 2026-01-07T17:53:43.620213Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:43.628741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:43.714485Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:43.715977Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679362846985555:2081] 1767808423619251 != 1767808423619254 2026-01-07T17:53:43.750041Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:43.753884Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:43.753958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:43.758973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:43.802336Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:43.802367Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:43.802374Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:43.802467Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:44.046450Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:53:44.227721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:44.233707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:44.244357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.318061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.471496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.538698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:44.638475Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:47.025461Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679380026856615:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.025540Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.025862Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679380026856625:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.025921Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.098126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.129143Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.157960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.189977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.220201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.262112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.301129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.357684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:47.449357Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679380026857496:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.449452Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.449768Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679380026857502:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.449820Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679380026857501:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.449852Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:47.454330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:47.466758Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679380026857505:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:47.523588Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679380026857556:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:48.620588Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679362846985582:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:48.620653Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:49.095701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 1494, MsgBus: 21465 2026-01-07T17:53:26.999482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679289455026413:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.999630Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.237048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.398335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.398497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.453494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.457527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.476612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.743038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.743062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.743068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.743124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.008873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.454881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.461318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.507407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.704893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.878485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.954320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.344338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306634897437:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.344477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.344983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306634897447:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.345030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.811008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.849342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.886019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.922599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.955528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.991418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.068333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.153639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310929865609:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.153714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.153949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310929865614:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.154000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679310929865615:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.154239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.157901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.169138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679310929865618:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.269756Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679310929865669:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.998211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679289455026413:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.998289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.936824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 360151050358:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.001626Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:42.120455Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:42.121908Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:42.123330Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679355856083017:2081] 1767808421999704 != 1767808421999707 2026-01-07T17:53:42.139455Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:42.139545Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:42.163821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:42.230414Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:42.230437Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:42.230445Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:42.230536Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:42.333736Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:42.700667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:42.710752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:42.718939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.784376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.957939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:43.022687Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:43.065560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.502197Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679373035954076:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.502305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.502577Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679373035954086:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.502630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.583127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.615858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.655802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.689259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.718705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.752533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.787791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.850945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:45.936738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679373035954964:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.936841Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.936996Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679373035954969:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.937051Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679373035954971:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.937098Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:45.940697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:45.953738Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679373035954973:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:46.012560Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679377330922320:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:47.001893Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679360151050358:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:47.002829Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:47.629320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> VDiskIOTest::HugeBlobIOCount [GOOD] >> TStorageBalanceTest::TestScenario3 [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount [GOOD] Test command err: Total puts# 5000 Total vdisk writes# 10093 Total log vdisk writes# 5084 Total chunk vdisk writes# 5009 Log vdisk writes per put# 1.0168 Chunk vdisk writes per put# 1.0018 VDisk writes per put# 2.0186 DeviceWrites 10093 WriteLog requests 5084 WriteHugeUser requests 5000 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2026-01-07T17:50:36.989005Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.018042Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.018314Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.019193Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.019541Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:37.020696Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:37.020758Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.021858Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:53:2077] ControllerId# 72057594037932033 2026-01-07T17:50:37.021897Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.022007Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.022151Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.035907Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.035985Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.038523Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.038708Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.038878Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.039034Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.039172Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:65:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.039318Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:66:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.039451Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:67:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.039479Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.039557Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:53:2077] 2026-01-07T17:50:37.039589Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:53:2077] 2026-01-07T17:50:37.039650Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.039694Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.040622Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.040698Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:37.043637Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.043786Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:37.044148Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:37.044364Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:37.045230Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:78:2076] ControllerId# 72057594037932033 2026-01-07T17:50:37.045263Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:37.045319Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:37.045419Z node 2 :BS_NODE DEBUG: 
{NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:37.054638Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:37.054705Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:37.056589Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:84:2080] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.056766Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:85:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.056908Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:86:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.057077Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:87:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.057213Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:88:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.057344Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:89:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.057477Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:90:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:37.057503Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:37.057563Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:78:2076] 2026-01-07T17:50:37.057592Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:78:2076] 2026-01-07T17:50:37.057669Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:37.057717Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:37.058112Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:37.058369Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.070874Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:53:2077] 2026-01-07T17:50:37.071019Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.072820Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.072883Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:43:2064] 2026-01-07T17:50:37.072920Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:43:2064] 2026-01-07T17:50:37.073545Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 
72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.075848Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:78:2076] 2026-01-07T17:50:37.075905Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.076138Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.076191Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.076413Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.076584Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:97:2093] 2026-01-07T17:50:37.076612Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:97:2093] 2026-01-07T17:50:37.076713Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.076746Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.077139Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.077374Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.077469Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.077550Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:97:2093] 2026-01-07T17:50:37.077617Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageC ... 
faf241] Result# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:53:52.798627Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [33dac03beefaf241] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:53:52.800292Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:53:52.800509Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} commited cookie 1 for step 487 2026-01-07T17:53:52.802516Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-07T17:53:52.802588Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:53:52.802867Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{986, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-07T17:53:52.802931Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:53:52.803064Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1357:2259] 2026-01-07T17:53:52.803103Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1357:2259] 2026-01-07T17:53:52.803159Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1293:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) 2026-01-07T17:53:52.906707Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-07T17:53:52.906793Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} 
took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:53:52.906928Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245539548288}: tablet 72075186224037932 wasn't changed 2026-01-07T17:53:52.906973Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245539548288}: tablet 72075186224037932 skipped channel 0 2026-01-07T17:53:52.907063Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245539548288}: tablet 72075186224037932 skipped channel 1 2026-01-07T17:53:52.907098Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245539548288}: tablet 72075186224037932 skipped channel 2 2026-01-07T17:53:52.907167Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137245539548288}(72075186224037932)::Execute - TryToBoot was not successfull 2026-01-07T17:53:52.907263Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{987, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-07T17:53:52.907314Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:53:52.927058Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [10b1fde6d2294410] bootstrap ActorId# [20:11712:4481] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:488:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:53:52.927227Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [10b1fde6d2294410] Id# [72057594037927937:2:488:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:53:52.927278Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [10b1fde6d2294410] restore Id# [72057594037927937:2:488:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:53:52.927339Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [10b1fde6d2294410] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG33 2026-01-07T17:53:52.927385Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [10b1fde6d2294410] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG32 2026-01-07T17:53:52.927511Z node 20 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [20:444:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:488:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:53:52.934084Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [10b1fde6d2294410] received {EvVPutResult Status# OK ID# [72057594037927937:2:488:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 505 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 506 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-07T17:53:52.934234Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [10b1fde6d2294410] Result# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:53:52.934302Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [10b1fde6d2294410] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:53:52.935095Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:53:52.935256Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} commited cookie 1 for step 488 2026-01-07T17:53:52.938896Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-07T17:53:52.938964Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:53:52.939201Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{988, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-07T17:53:52.939255Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:53:52.939383Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1357:2259] 2026-01-07T17:53:52.939418Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1357:2259] 2026-01-07T17:53:52.939473Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1293:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) 2026-01-07T17:53:53.041103Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-07T17:53:53.041196Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:53:53.041340Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: 
HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245538965440}: tablet 72075186224037979 wasn't changed 2026-01-07T17:53:53.041393Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245538965440}: tablet 72075186224037979 skipped channel 0 2026-01-07T17:53:53.041490Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245538965440}: tablet 72075186224037979 skipped channel 1 2026-01-07T17:53:53.041533Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137245538965440}: tablet 72075186224037979 skipped channel 2 2026-01-07T17:53:53.041619Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137245538965440}(72075186224037979)::Execute - TryToBoot was not successfull 2026-01-07T17:53:53.041702Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{989, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-07T17:53:53.041762Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T17:51:44.257969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:44.343615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:44.350885Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:44.351961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:44.352111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:44.720432Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:44.809808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:44.809929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:44.845951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:44.916911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:45.907970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:45.908906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:45.908962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:45.908982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:45.909184Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:45.973600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:46.496992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:48.653359Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.658587Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:48.669107Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.702829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:48.702977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.746294Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:48.748176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:48.922998Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:48.923114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.924753Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.928742Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.929508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.930492Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.930683Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.930977Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.931088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.931343Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.931451Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:48.946850Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.117053Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.157457Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:49.157544Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:49.190096Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:49.190375Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:49.190566Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:49.190622Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:49.190688Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:49.190730Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:49.190783Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:49.190821Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:49.191183Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:49.193227Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.193373Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2115:2582], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.208336Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2150:2614] 2026-01-07T17:51:49.210093Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2150:2614], schemeshard id = 72075186224037897 2026-01-07T17:51:49.301843Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2666] 2026-01-07T17:51:49.304425Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:49.313643Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Describe result: PathErrorUnknown 2026-01-07T17:51:49.313692Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Creating table 2026-01-07T17:51:49.313762Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:49.318625Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2305:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:49.325126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:49.331335Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:49.331453Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:49.340642Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:49.476893Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:49.546123Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:49.810392Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:49.929013Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:49.929180Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Column diff is empty, finishing 2026-01-07T17:51:50.459568Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... or_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5986:4684], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 2026-01-07T17:53:01.758460Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:03.974738Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:03.974935Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2026-01-07T17:53:03.975074Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-07T17:53:07.237490Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:09.401600Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:09.401768Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2026-01-07T17:53:09.401879Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-07T17:53:12.664527Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:12.785477Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:53:12.785587Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:53:12.785633Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:53:12.785680Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:53:14.936544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:14.936713Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-07T17:53:14.936837Z node 2 
:STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-07T17:53:14.960021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:53:14.960113Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:18.165046Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:20.177557Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:20.177683Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-07T17:53:20.177770Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-07T17:53:23.280811Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:25.375660Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:25.375796Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-07T17:53:25.375902Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-07T17:53:28.613446Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:30.906281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:30.906458Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2026-01-07T17:53:30.906528Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-07T17:53:30.941572Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:53:30.941701Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:30.942043Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:53:30.961614Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:34.084359Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:36.288035Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:36.288217Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 9 2026-01-07T17:53:36.288353Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-07T17:53:36.311018Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:53:36.311104Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 
2026-01-07T17:53:36.311351Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:53:36.326207Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:39.677591Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:41.805413Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:41.805574Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 10 2026-01-07T17:53:41.805693Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-07T17:53:41.840099Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:53:41.840162Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:41.840326Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:53:41.855682Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T17:53:45.060737Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:45.160740Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-07T17:53:45.160794Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:53:45.160818Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-07T17:53:45.160856Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-07T17:53:47.383255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:47.383427Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-07T17:53:47.383555Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-07T17:53:47.439761Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:53:47.439841Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:47.439989Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:53:47.456698Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... 
blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_COLUMNSHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... waiting for stats update from SchemeShard 2026-01-07T17:53:50.767680Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:52.816377Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:52.816599Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-07T17:53:52.816756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-07T17:53:52.849705Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-07T17:53:52.849815Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-07T17:53:52.850130Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-07T17:53:52.864499Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-07T17:53:55.996517Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-07T17:53:58.181418Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-07T17:53:58.181611Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 ... waiting for TEvPropagateStatistics (done) 2026-01-07T17:53:58.181997Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-07T17:53:58.182083Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7758:5439]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:58.182378Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:53:58.182426Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:7758:5439], StatRequests.size() = 1 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |96.0%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching >> TProxyActorTest::TestCreateSemaphoreInterrupted >> TProxyActorTest::TestCreateSemaphore |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TProxyActorTest::TestAttachSession [GOOD] >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 62063, MsgBus: 12100 2026-01-07T17:53:29.139217Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679305408704437:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:29.139275Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:29.475132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:29.475243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:29.510079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:29.514847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:29.560591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679305408704405:2081] 1767808409123878 != 1767808409123881 2026-01-07T17:53:29.564710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:29.635575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:29.635593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:29.635603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:29.635669Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:29.669731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:29.986042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:30.049421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.161704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.262158Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:30.312471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.366909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.224810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318293608171:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.224946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.225295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318293608181:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.225378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.526488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.561329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.592244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.625674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.656423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.695343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.736321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.778281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.843512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318293609052:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.843609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.843690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318293609057:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.843978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318293609060:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.844033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.847288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:32.857218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679318293609059:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:32.953843Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679318293609112:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:34.141080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679305408704437:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:34.141163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9077, MsgBus: 1517 2026-01-07T17:53:37.555349Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679339830819667:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.555412Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sna ... accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:50.810011Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679373280108285:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:50.810084Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4936, MsgBus: 10790 2026-01-07T17:53:54.341425Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592679411687717975:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:54.341515Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:54.357419Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:54.424450Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:54.464821Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:54.464889Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:54.467572Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:54.499353Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:54.499375Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:54.499385Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:54.499460Z node 4 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:54.622424Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:54.922782Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:54.939655Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:55.019553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:55.166114Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:55.276862Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:55.392055Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:57.855880Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679424572621701:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:57.855972Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:57.856259Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679424572621710:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:57.856315Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:57.915662Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:57.942430Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:57.975694Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.004279Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.031396Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.061068Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.090844Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.147202Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.214589Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679428867589873:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:58.214673Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:58.214763Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679428867589878:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:58.214805Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679428867589880:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:58.214831Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:58.217847Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:58.226250Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679428867589882:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:58.291306Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679428867589933:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:59.341423Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679411687717975:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:59.341506Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR cookie 0 ... waiting for blocked registrations (done) 2026-01-07T17:54:03.051941Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> KqpBatchDelete::Large_3 [GOOD] >> KqpBatchUpdate::Large_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 3328, MsgBus: 3435 2026-01-07T17:53:31.451145Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679311968652488:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.451184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:31.683839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:31.683978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:31.733134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:31.751413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:31.763471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:31.765713Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679311968652468:2081] 1767808411450218 != 1767808411450221 2026-01-07T17:53:31.845443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:31.845485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:31.845500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:31.845627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:31.956313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:32.251623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:32.302797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.435330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.533409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:32.590604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.653441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.553090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679324853556225:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.553187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.553585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679324853556235:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.553657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.871036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.899145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.928059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.960477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.992272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.026290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.087101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.127940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:35.207609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679329148524401:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.207690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.207850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679329148524406:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.207869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679329148524407:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.207913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:35.212559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:35.223528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679329148524410:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:35.313360Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679329148524461:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:36.451520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679311968652488:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:36.451591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:36.812635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... 99536658146131:7762515]; 2026-01-07T17:53:48.929469Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:48.946908Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:49.021613Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:49.025038Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679386356520505:2081] 1767808428919316 != 1767808428919319 2026-01-07T17:53:49.059474Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:49.059567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:49.063962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:49.121470Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:49.121495Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:49.121501Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:49.121577Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:49.186521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:49.583991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:49.595352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:49.645076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:49.816960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:49.884601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:49.952311Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:52.018544Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679403536391557:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.018650Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.018925Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679403536391566:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.018971Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.072071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.100089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.127543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.155961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.187048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.227188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.266279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.314409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:52.388352Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679403536392436:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.388467Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.388652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679403536392441:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.388699Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679403536392442:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.388735Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:52.391850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:52.402133Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679403536392445:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:52.493060Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679403536392496:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:53.927549Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679386356520631:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:53.929059Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:54.003240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:04.012329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:54:04.012360Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 13273, MsgBus: 29735 2026-01-07T17:53:38.266418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679344861667825:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:38.266479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:38.529758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:38.540171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:38.540322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:38.571301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:38.609404Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679344861667722:2081] 1767808418230100 != 1767808418230103 2026-01-07T17:53:38.618708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:38.733541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.733562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:53:38.733568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.733635Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.760500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:39.210960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:39.217908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:39.286214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:39.305477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.428354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.596943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.670674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.399192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679357746571480:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.399312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.399609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679357746571490:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.399657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.771063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.801495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.834783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.863496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.897956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.939700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.972629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.037824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.109206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679362041539654:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.109284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.109384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679362041539659:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.109563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679362041539661:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.109638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.112911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:42.124348Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679362041539663:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:42.192780Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679362041539714:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:43.266319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679344861667825:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:43.266399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:43.691307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... eTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 14759, MsgBus: 29039 2026-01-07T17:53:58.553990Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592679427768175054:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:58.554062Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:58.577252Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:58.646973Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:58.693241Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:58.693358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:58.697837Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:58.742161Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:58.742192Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:58.742201Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:58.742291Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:58.804211Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:59.194570Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:59.213353Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.268380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.428292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.493848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.557358Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:01.895954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679440653078779:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.896055Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.896408Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679440653078789:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.896471Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.963362Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.997111Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.028081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.060093Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.090549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.125973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.163792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.213353Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:02.297159Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679444948046955:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:02.297281Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:02.297627Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679444948046961:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:02.297674Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:02.297678Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679444948046960:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:02.301897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:02.314207Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679444948046964:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:02.406528Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679444948047015:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:03.554271Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679427768175054:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:03.554356Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:04.016611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 4783, MsgBus: 11645 2026-01-07T17:53:30.426924Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679310285061707:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:30.427025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:30.613059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:30.634795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:30.635089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:30.691058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:30.706929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679310285061679:2081] 1767808410425432 != 1767808410425435 2026-01-07T17:53:30.722079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:30.768105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:30.768125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:30.768135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:30.768261Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got 
bad distributable configuration 2026-01-07T17:53:30.781528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:31.204217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:31.260327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.382761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.482035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:31.551916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.623714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.483241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679323169965439:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:33.483628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:33.484435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679323169965449:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:33.484503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:33.834395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.868147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.895733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.918640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.951928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:33.997678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.032871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.079867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:34.161068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679327464933622:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.161160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.161408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679327464933627:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.161410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679327464933628:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.161451Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:34.164967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:34.175912Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679327464933631:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:34.247342Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679327464933682:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:35.427180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679310285061707:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:35.427247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1747, MsgBus: 24724 2026-01-07T17:53:40.077921Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679352744831064:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.078022Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sna ... 58.231195Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592679427188809565:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:58.231238Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:58.239440Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:58.325354Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:58.326082Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:58.335575Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592679427188809537:2081] 1767808438230156 != 1767808438230159 2026-01-07T17:53:58.335864Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:58.335965Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:58.367383Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:58.414340Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:58.414366Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:58.414372Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:58.414456Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:58.523956Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:58.752917Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:58.757689Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:58.771080Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.843958Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:58.976660Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.041494Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:59.237996Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:01.393889Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679440073713299:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.393985Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.394193Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679440073713309:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.394240Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.456921Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.482664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.511312Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.538412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.567200Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.597154Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.629263Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.670739Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:01.744046Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679440073714180:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.744133Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.744145Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679440073714185:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.744317Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679440073714187:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.744357Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:01.747463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:01.757335Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679440073714188:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:01.832592Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679440073714240:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:03.231337Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679427188809565:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:03.231404Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> HttpRequest::Probe [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> KqpStats::DataQueryWithEffects+UseSink >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-01-07T17:52:06.660784Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1767808326660752 2026-01-07T17:52:06.972389Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592678946336263105:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:06.972449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:07.052692Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:52:07.054211Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592678951893094121:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:52:07.054276Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:52:07.063266Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:52:07.186769Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:07.187262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:07.327737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:07.327859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:07.329073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:52:07.329180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:07.341086Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:52:07.341346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:07.344521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:07.346227Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:07.346967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:52:07.421240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:07.590020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/003893/r3tmp/yandexKCeyvf.tmp 2026-01-07T17:52:07.590062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/003893/r3tmp/yandexKCeyvf.tmp 2026-01-07T17:52:07.591270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/003893/r3tmp/yandexKCeyvf.tmp 2026-01-07T17:52:07.591373Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:07.830102Z INFO: TTestServer started on Port 21640 GrpcPort 1185 2026-01-07T17:52:07.831483Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:52:07.980265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:1185 2026-01-07T17:52:07.995692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:08.060211Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:52:09.301020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678959221166317:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.301021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678959221166326:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.301149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.301407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592678959221166333:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.301468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:09.308636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:09.328623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592678959221166332:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-07T17:52:09.470428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592678959221166421:2986] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:09.809864Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592678959221166434:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:52:09.810711Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592678960483029052:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:52:09.811089Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZGQ2OGQzZGYtM2U5NjI2NmItNGNiZjgwMGUtZTdjNWI4YmM=, ActorId: [2:7592678960483029011:2299], ActorState: ExecuteState, LegacyTraceId: 01kecscsr06g23dj0g1hn6r8sx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:52:09.815169Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:52:09.816086Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTA0MzhkMWUtOWYwMzc1ZGMtYWI4YTI3NDItNGMwZjBmM2Y=, ActorId: [1:7592678959221166315:2328], ActorState: ExecuteState, LegacyTraceId: 01kecscsj780hrygsvghr6m7kb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:52:09.816420Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:52:09.904213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... 
7:54:06.253018Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:54:06.253046Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:54:06.253073Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:54:06.253124Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-07T17:54:06.253174Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:54:06.253640Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-07T17:54:06.253667Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-07T17:54:06.253729Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:54:06.254036Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0 2026-01-07T17:54:06.254749Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767808446254 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T17:54:06.254851Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-07T17:54:06.255071Z :INFO: [] MessageGroupId [src] SessionId [src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0] Write session: close. 
Timeout = 0 ms 2026-01-07T17:54:06.255145Z :INFO: [] MessageGroupId [src] SessionId [src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0] Write session will now close 2026-01-07T17:54:06.255217Z :DEBUG: [] MessageGroupId [src] SessionId [src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0] Write session: aborting 2026-01-07T17:54:06.255891Z :INFO: [] MessageGroupId [src] SessionId [src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0] Write session: gracefully shut down, all writes complete 2026-01-07T17:54:06.255949Z :DEBUG: [] MessageGroupId [src] SessionId [src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0] Write session: destroy 2026-01-07T17:54:06.256515Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0 grpc read done: success: 0 data: 2026-01-07T17:54:06.256556Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0 grpc read failed 2026-01-07T17:54:06.256610Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 1 sessionId: src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0 2026-01-07T17:54:06.256634Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|b9a45e40-fbb6b6de-8e068dfa-1f9e21f2_0 is DEAD 2026-01-07T17:54:06.257059Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-07T17:54:06.257401Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [15:7592679464089069295:2457] destroyed 2026-01-07T17:54:06.257448Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-07T17:54:06.257494Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:06.257521Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.257551Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:06.257567Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.257595Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:54:06.284368Z :INFO: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Starting read session 2026-01-07T17:54:06.284424Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Starting cluster discovery 2026-01-07T17:54:06.284717Z :INFO: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:62439: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:62439
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:62439. " 2026-01-07T17:54:06.284811Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Restart cluster discovery in 0.009108s 2026-01-07T17:54:06.295118Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Starting cluster discovery 2026-01-07T17:54:06.295443Z :INFO: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:62439: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:62439
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:62439. " 2026-01-07T17:54:06.295496Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Restart cluster discovery in 0.017108s 2026-01-07T17:54:06.308648Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:06.308686Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.308703Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:06.308724Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.308750Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:54:06.313104Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Starting cluster discovery 2026-01-07T17:54:06.313327Z :INFO: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:62439: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:62439
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:62439. " 2026-01-07T17:54:06.313353Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Restart cluster discovery in 0.026161s 2026-01-07T17:54:06.340082Z :DEBUG: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Starting cluster discovery 2026-01-07T17:54:06.340317Z :NOTICE: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:62439: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:62439
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:62439. " } 2026-01-07T17:54:06.340574Z :NOTICE: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:62439: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:62439
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:62439. " } 2026-01-07T17:54:06.340783Z :INFO: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Closing read session. Close timeout: 0.000000s 2026-01-07T17:54:06.340957Z :NOTICE: [/Root] [/Root] [2f756d31-ad477303-f0aa03e0-61e50092] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T17:54:06.409010Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:06.409047Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.409066Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:06.409088Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.409107Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:54:06.509345Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:06.509387Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.509404Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:06.509426Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:06.509455Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T17:54:06.978904Z node 15 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [15:7592679464089069337:2468] TxId: 281474976715674. Ctx: { TraceId: 01kecsgc39ab7adnf3m3jkxdfn, Database: /Root, SessionId: ydb://session/3?node_id=15&id=NjE5OGU2OTMtNjJiOTU2YzktNDA5YTdhMDAtMTFhNGUyMDI=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 trace_id# 2026-01-07T17:54:06.979092Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [15:7592679464089069341:2468], TxId: 281474976715674, task: 3. Ctx: { TraceId : 01kecsgc39ab7adnf3m3jkxdfn. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=15&id=NjE5OGU2OTMtNjJiOTU2YzktNDA5YTdhMDAtMTFhNGUyMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7592679464089069337:2468], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |96.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpLimits::KqpMkqlMemoryLimitException >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpParams::ImplicitParameterTypes >> KqpParams::DefaultParameterValue >> KqpQuery::Now >> KqpLimits::BigParameter >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap >> KqpStats::JoinNoStatsScan >> KqpQueryDiscard::DiscardSelectSupport >> KqpExplain::PrecomputeRange >> KqpStats::RequestUnitForBadRequestExecute >> KqpStats::OneShardLocalExec+UseSink >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal >> KqpQuery::YqlSyntaxV0 >> KqpStats::StatsProfile >> KqpExplain::SortStage |96.1%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:44.298285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:44.363681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:44.369590Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:44.369846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:44.369951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:44.720770Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:44.809837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:44.809934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:44.845950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:44.931084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:45.908022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:45.908876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:45.908908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:45.908954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:45.909109Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:45.971981Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:46.486047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:48.927047Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.931665Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:48.934052Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.956821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:48.956905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.994051Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:48.995594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.160361Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.160449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.161579Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.162069Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.162499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163143Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163249Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163470Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163550Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163674Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.163803Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.178422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.333650Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.361743Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:49.361839Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:49.414696Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:49.416031Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:49.416245Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:49.416309Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:49.416362Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:49.416474Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:49.416531Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:49.416582Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:49.417248Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:49.419374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:49.421541Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:49.428967Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:49.429011Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:49.429072Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:49.434070Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.434142Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.455664Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:51:49.456054Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:49.458730Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:49.493108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:49.506918Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:49.507009Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:49.516940Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:49.666050Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:49.719470Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:49.994714Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:50.123965Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:50.124037Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:50.823570Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:21.737635Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:5410:4750], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:52:21.871264Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:5490:4798], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:52:21.933926Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5489:4797] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:22.191886Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:5511:4811]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:22.192333Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:52:22.192458Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:5513:4813] 2026-01-07T17:52:22.192541Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:5513:4813] 2026-01-07T17:52:22.192883Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5514:4814] 2026-01-07T17:52:22.193034Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5513:4813], server id = [2:5514:4814], tablet id = 72075186224037894, status = OK 2026-01-07T17:52:22.193141Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5514:4814], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:52:22.193240Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:52:22.193467Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:52:22.193577Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5511:4811], StatRequests.size() = 1 2026-01-07T17:52:22.193685Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:53:18.700478Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsd5mx2w037znd75qwsz5b", SessionId: ydb://session/3?node_id=2&id=YmJlODZlMWEtYmY1OWMyOGYtOGYzN2ExZjctZmU5NWE0OWI=, Slow query, duration: 57.032031s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:53:18.703151Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5383:4736], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:53:18.705770Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5383:4736], Start read next stream part 2026-01-07T17:53:18.706122Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37090, txId: 18446744073709551615] shutting down 2026-01-07T17:53:18.709828Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5373:2463], 
ActorId: [2:5706:4987], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:53:18.712982Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5383:4736], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:53:18.713061Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5383:4736], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzQ0MjI3MDctNzRjMTBhM2YtYjViZWE1NDAtZjZiZmQyYTM=, TxId: 2026-01-07T17:53:18.714754Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:53:18.714884Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], Start read next stream part 2026-01-07T17:53:18.827642Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5729:5006]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:18.827928Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:53:18.827984Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5729:5006], StatRequests.size() = 1 2026-01-07T17:54:07.971811Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsexbt0r8tgbvf5xm2rq4p", SessionId: ydb://session/3?node_id=2&id=MmEyZWVmYjMtYWYyYjBlYmQtMWQ5YTE0NmYtZGQ3NDM2OTI=, Slow query, duration: 49.253375s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:54:07.974201Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:54:07.974311Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], Start read next stream part 2026-01-07T17:54:07.974708Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37160, txId: 18446744073709551615] shutting down 2026-01-07T17:54:07.975275Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5884:5145], ActorId: [2:5887:5147], Starting query actor #1 [2:5888:5148] 2026-01-07T17:54:07.975333Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5887:5147], ActorId: [2:5888:5148], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:54:07.978142Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:54:07.978206Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5373:2463], ActorId: [2:5706:4987], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjhlYTg5MjktNDM5M2RjMmQtMmZkN2QxN2UtYTY2ODU2MWY=, TxId: 2026-01-07T17:54:07.980660Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5887:5147], ActorId: [2:5888:5148], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDU4MjMwOWItODQ3YWUwNy1iZDQzMGE3ZC0xYmJlNWQyZQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:54:08.088956Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5901:5160]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:08.089338Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:54:08.089391Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5901:5160], StatRequests.size() = 1 2026-01-07T17:54:08.263315Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5887:5147], ActorId: [2:5888:5148], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDU4MjMwOWItODQ3YWUwNy1iZDQzMGE3ZC0xYmJlNWQyZQ==, TxId: 2026-01-07T17:54:08.263415Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5887:5147], ActorId: [2:5888:5148], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDU4MjMwOWItODQ3YWUwNy1iZDQzMGE3ZC0xYmJlNWQyZQ==, TxId: 2026-01-07T17:54:08.263962Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5884:5145], ActorId: [2:5887:5147], Got response [2:5888:5148] SUCCESS 2026-01-07T17:54:08.264376Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:54:08.313024Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:54:08.313095Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=D%ǀ, ActorId=[1:3678:3499] 2026-01-07T17:54:08.314123Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:5918:4151]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:08.314362Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:54:08.314409Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:54:08.314605Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:54:08.314644Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:54:08.314702Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T17:54:08.329460Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpParams::MissingParameter >> KqpQuery::QueryCache >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::OneShardLocalExec-UseSink >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpStats::StatsProfile [FAIL] >> KqpStats::StreamLookupStats+StreamLookupJoin >> KqpQuery::Now [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] >> KqpExplain::UpdateSecondaryConditional+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn >> KqpQuery::QueryCache [GOOD] >> KqpStats::JoinNoStatsScan [GOOD] >> KqpQuery::QueryCacheTtl >> 
KqpStats::DeferredEffects+UseSink >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal >> KqpBatchUpdate::Large_3 [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:48.687467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.757256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.762470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:48.762751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:48.762843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:49.050099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.137264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.137366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.171298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.245867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:49.839444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:49.840369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:49.840400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:49.840421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:49.840745Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:49.902319Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:50.421100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:52.804072Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:52.809339Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:52.812085Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:52.834179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:52.834259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:52.871058Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:52.872224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.024169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:53.024248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:53.025304Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.025864Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.026403Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.027325Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.027849Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.028035Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.028199Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.028410Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.028628Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:53.042906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:53.208102Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:53.243162Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:53.243226Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:53.268010Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:53.269070Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:53.269266Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:53.269324Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:53.269368Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:53.269400Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:53.269456Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:53.269493Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:53.270054Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:53.281099Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.281174Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:53.327061Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2633] 2026-01-07T17:51:53.327816Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2633], schemeshard id = 72075186224037897 2026-01-07T17:51:53.372324Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2248:2661] 2026-01-07T17:51:53.374053Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:53.384255Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Describe result: PathErrorUnknown 2026-01-07T17:51:53.384303Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Creating table 2026-01-07T17:51:53.384367Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:53.390995Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:53.393808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:53.399156Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:53.399247Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:53.408611Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:53.576129Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:53.595799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:53.883196Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:54.021595Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:54.021660Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2255:2667] Owner: [2:2254:2666]. Column diff is empty, finishing 2026-01-07T17:51:54.772198Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:23.903350Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4633:4118]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:52:23.903555Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:52:23.903615Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4635:4120] 2026-01-07T17:52:23.903668Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4635:4120] 2026-01-07T17:52:23.903907Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4636:4121] 2026-01-07T17:52:23.904001Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4635:4120], server id = [2:4636:4121], tablet id = 72075186224037894, status = OK 2026-01-07T17:52:23.904046Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4636:4121], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:52:23.904107Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:52:23.904206Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:52:23.904276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4633:4118], StatRequests.size() = 1 2026-01-07T17:52:23.904349Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:53:18.707161Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsd77z3wt9w6tnw701c0zg", SessionId: ydb://session/3?node_id=2&id=NWYwOWJkNjUtNWNjYTA4ZDctZWY4ZjU2YS00M2Q0NGU0Ng==, Slow query, duration: 55.404148s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value),min(Value),max(Value) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T17:53:18.708626Z node 2 :STATISTICS DEBUG: 
query_actor.cpp:303: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4505:4043], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:53:18.708870Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4505:4043], Start read next stream part 2026-01-07T17:53:18.709465Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-07T17:53:18.709701Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:53:18.712701Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4505:4043], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:53:18.712784Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4505:4043], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjgyNTZmZjAtYTI2MTZiNjItYmIwY2Y1OTItOWYyNWNlMGE=, TxId: 2026-01-07T17:53:18.714928Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1` 2026-01-07T17:53:18.715043Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], Start read next stream part 2026-01-07T17:53:18.855521Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4797:4265]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:18.855810Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:53:18.855871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4797:4265], StatRequests.size() = 1 2026-01-07T17:54:17.200162Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsexbt4nyw8sv0j6bektzx", SessionId: ydb://session/3?node_id=2&id=YzVkNDM1M2YtOGQ0MzQxYTctZjQ1NmQzODItM2Y4ZWYwNjU=, Slow query, duration: 58.481489s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n 
StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T17:54:17.202254Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:54:17.202392Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], Start read next stream part 2026-01-07T17:54:17.202726Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32180, txId: 18446744073709551615] shutting down 2026-01-07T17:54:17.203071Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4908:4367], ActorId: [2:4910:4368], Starting query actor #1 [2:4911:4369] 2026-01-07T17:54:17.203122Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4910:4368], ActorId: [2:4911:4369], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:54:17.205697Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:54:17.205762Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4495:2463], ActorId: [2:4773:4245], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjFmYWVkNmEtZDY3ZWYxMzItNWVjYzE4MDgtNGUzYWEyYjk=, TxId: 2026-01-07T17:54:17.207176Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4910:4368], ActorId: [2:4911:4369], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjdkYmMxYTYtZDE0NmE3N2ItZWY2OWJkYi01OGYyMGY2Nw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:54:17.282540Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4927:4383]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:17.282853Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:54:17.282900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4927:4383], StatRequests.size() = 1 2026-01-07T17:54:17.454778Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4910:4368], ActorId: [2:4911:4369], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=MjdkYmMxYTYtZDE0NmE3N2ItZWY2OWJkYi01OGYyMGY2Nw==, TxId: 2026-01-07T17:54:17.454862Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4910:4368], ActorId: [2:4911:4369], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjdkYmMxYTYtZDE0NmE3N2ItZWY2OWJkYi01OGYyMGY2Nw==, TxId: 2026-01-07T17:54:17.455297Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4908:4367], ActorId: [2:4910:4368], Got response [2:4911:4369] SUCCESS 2026-01-07T17:54:17.456080Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:54:17.528766Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:54:17.528852Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3367:3467] 2026-01-07T17:54:17.530615Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4944:4393]], StatType[ 3 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:17.530999Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:54:17.531059Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T17:54:17.531330Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T17:54:17.531388Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T17:54:17.531443Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 2 ] 2026-01-07T17:54:17.536720Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> KqpQueryDiscard::DiscardSelectSupport [GOOD] >> KqpQueryDiscard::DiscardSelectEnsureExecuted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 3159, MsgBus: 10188 2026-01-07T17:53:26.997187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679290182593972:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.062797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:27.449811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.537616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.537704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.543759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.587675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.635602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.741623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.741643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.741646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.741703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.013112Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.505787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.514257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.574724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.749100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.913529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:29.008047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.436553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679307362464989:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.436677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.437007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679307362464999:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.437058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.816788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.847782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.880614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.915239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.959854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.001427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.086305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.161864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311657433171:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.161953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.162080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311657433177:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.162118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.162132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311657433176:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.165581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.175153Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679311657433180:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.232512Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679311657433231:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.997520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679290182593972:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:32.001987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.936161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itsel ... ed 2026-01-07T17:53:53.261001Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679408085651920:2081] 1767808433158997 != 1767808433159000 2026-01-07T17:53:53.303155Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:53.303259Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:53.307809Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:53.364440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:53.364467Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:53.364475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:53.364558Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:53.451891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:53.794742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:53.804670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:53.861466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:54.003446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:54.069179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:54.189837Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:56.589884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970555683:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.589981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.590468Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970555693:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.590518Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.590696Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970555695:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.590741Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.659564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.688574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.714878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.740857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.771006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.815604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.856497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.911732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:56.999473Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970556564:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.999568Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.999906Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970556570:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.999917Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679420970556569:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:56.999978Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:57.004052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:57.017384Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679420970556573:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:57.109291Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679425265523920:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:58.162278Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679408085652040:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:58.164790Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:58.661937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:08.243522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:54:08.243553Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpQuery::RewriteIfPresentToMap >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpParams::MissingOptionalParameter-UseSink >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::CurrentUtcTimestamp >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpLimits::TooBigQuery+useSink >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> KqpScripting::StreamOperationTimeout [GOOD] >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> KqpExplain::UpdateOnSecondary+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> 
KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 28177, MsgBus: 64191 2026-01-07T17:53:07.757953Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679210525241950:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:07.774789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:07.993617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:08.000563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:08.000650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:08.014457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:08.077881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679210525241922:2081] 1767808387755428 != 1767808387755431 2026-01-07T17:53:08.083117Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:08.228733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:08.228760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:08.228768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:08.228887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:08.282052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:08.688479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:08.763635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.787188Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:08.939458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.121357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:09.194442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.103020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679227705112989:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.103124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.103459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679227705112999:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.103504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.511350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.539846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.568130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.593952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.622766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.652795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.684179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.729116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:11.803461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679227705113869:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.803538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.803739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679227705113874:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.803763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679227705113875:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.803798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:11.807260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:11.817816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679227705113878:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:11.890623Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679227705113931:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:12.757577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679210525241950:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:12.757650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:13.526304Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808393561, txId: 281474976710674] shutting down 2026-01-07T17:53:13.807850Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [s ... ot: [step: 1767808458584, txId: 281474976711271] shutting down Trying to start YDB, gRPC: 15570, MsgBus: 2740 2026-01-07T17:54:19.722003Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679517321640020:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:19.722450Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:19.747752Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:19.828745Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:19.862376Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:19.862481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:19.889024Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:19.939760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:19.939789Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:19.939807Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:19.939914Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:19.960753Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:20.470940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:20.482105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:20.487845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:20.563272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:20.730108Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:20.854640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:20.940353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.704938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679534501511027:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:23.705063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:23.705326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679534501511036:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:23.705382Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:23.776795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.813269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.848976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.881430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.919784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:23.982906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.018911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.070674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.181825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679538796479203:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:24.181925Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:24.182211Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679538796479208:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:24.182250Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679538796479209:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:24.182499Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:24.187122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:24.201640Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679538796479212:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:24.256958Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679538796479263:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:24.722967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679517321640020:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:24.723035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:26.403937Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=2&id=NjZiNGU3MC1lNDM0Mzc0Yi03ZjdiYTczYS1lOTg1ZThlNA== } |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpQueryDiscard::DiscardSelectEnsureExecuted [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::ReadTableRanges >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpQuery::UpdateThenDelete-UseSink >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> KqpTypes::UnsafeTimestampCastV0 >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::DdlInDataQuery >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> TNodeBrokerTest::TestRandomActions [GOOD] >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2026-01-07T17:52:42.170451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:42.170534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-07T17:52:42.356762Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:52:42.369891Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:52:42.398812Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:52:42.428985Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:42.617359Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-07T17:52:43.597843Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:52:43.610744Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:52:43.637264Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.201559Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.215376Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.216508Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:44.242736Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:44.720789Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:52:44.746740Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:52:44.748178Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.748482Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:52:44.786347Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.790575Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:52:44.791160Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-07T17:52:44.791650Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-07T17:52:45.256056Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:52:45.257033Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:52:45.257364Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: 
Unknown node 2026-01-07T17:52:45.284586Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.070405Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.084449Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.084807Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.085057Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.085381Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.098170Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.492833Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.571268Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.577043Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.578170Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:52:46.649200Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-07T17:52:46.748483Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-07T17:52:46.749386Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:46.749927Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-07T17:52:46.784752Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-07T17:52:46.789923Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:46.891193Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:47.652568Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-07T17:52:47.652969Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.011974Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.012736Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.013258Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 
2026-01-07T17:52:48.028701Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.029437Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.029893Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.030874Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.031393Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.032000Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.032905Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.033466Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.033893Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.034480Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.034955Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.035531Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.035967Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.036528Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.037670Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.400597Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.780634Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.793664Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.843609Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.857264Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.857589Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.872488Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.873772Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 
2026-01-07T17:52:48.875170Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.875811Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:52:48.901455Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.902137Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.902442Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.903006Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-07T17:52:48.903378Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:52:48.9359 ... RONG_REQUEST: Node has expired 2026-01-07T17:54:23.453993Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.459939Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.466086Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.467982Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.469976Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-07T17:54:23.471924Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.473671Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-07T17:54:23.483280Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-07T17:54:23.485619Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:23.542083Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-07T17:54:24.418019Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:54:24.497978Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-07T17:54:24.500181Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-07T17:54:25.555871Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-07T17:54:25.610277Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:54:25.612647Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend 
lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.238220Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.257976Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.260442Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.279824Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.284910Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.325631Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.437689Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.456639Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.459171Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:54:26.474340Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:54:27.125535Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-07T17:54:27.179855Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-07T17:54:27.182472Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-07T17:54:27.267858Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-07T17:54:27.313569Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:54:27.600974Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-07T17:54:29.548896Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-07T17:54:30.045263Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-07T17:54:30.827276Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:54:30.967293Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-07T17:54:31.777152Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-07T17:54:31.822491Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.724517Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: 
Unknown node 2026-01-07T17:54:32.727152Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-07T17:54:32.768054Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.771139Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.773196Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.779304Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.868463Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.877335Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.879491Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.881891Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.884026Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.890007Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.892100Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.925148Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.930376Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.933009Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.940691Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.949354Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.960129Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.962068Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.967907Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-07T17:54:32.970179Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.972367Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:32.974253Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free 
node IDs 2026-01-07T17:54:33.055215Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-07T17:54:33.062152Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:33.064763Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-07T17:54:33.066272Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-07T17:54:33.067678Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-07T17:54:33.634733Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.689199Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.711304Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.814079Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.865346Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.868532Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.892631Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.907883Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.942201Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.988963Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-07T17:54:33.996633Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] Test command err: 2026-01-07T17:52:33.568998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.569059Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 22543, MsgBus: 11834 2026-01-07T17:53:14.793035Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679238268072203:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:14.793242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:15.023158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:15.042741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:15.042926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:15.077580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:15.091776Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:15.095428Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679238268071972:2081] 1767808394777786 != 1767808394777789 2026-01-07T17:53:15.209495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:15.209518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:15.209530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:15.209611Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:15.303704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:15.637184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:15.683767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.782055Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:15.809788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:15.965020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:16.041254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:17.855910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679251152975738:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.855995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.856214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679251152975748:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:17.856247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.254010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.281003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.307920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.334780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.361676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.393581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.428242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.468331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:18.525786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255447943913:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.525839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.525908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255447943918:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.525998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679255447943920:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.526076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:18.529057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:18.538023Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679255447943922:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:18.602082Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679255447943973:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:19.785210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679238268072203:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:19.785273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:20.288421Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808400323, txId: 281474976715674] shutting down 2026-01-07T17:53:20.544766Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [s ... N: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808465367, txId: 281474976716268] shutting down 2026-01-07T17:54:25.725823Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808465752, txId: 281474976716271] shutting down Trying to start YDB, gRPC: 3470, MsgBus: 30287 2026-01-07T17:54:26.928709Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679550121306974:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:26.929780Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:26.962736Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:27.047705Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:27.069932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:27.070057Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:27.090349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:27.161021Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:27.171248Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:27.171274Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:27.171289Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-07T17:54:27.171394Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:27.618231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:27.632511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:27.693340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:27.845026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:27.974193Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:28.005732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:30.829773Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679567301177960:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:30.829855Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:30.830273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679567301177970:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:30.830322Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:30.901634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:30.944908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.019893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.056058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.090965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.131290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.169365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.218992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.306709Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679571596146139:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:31.306807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:31.307122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679571596146144:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:31.307170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679571596146145:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:31.307211Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:31.311231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:31.328068Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679571596146148:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:31.404280Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679571596146199:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:31.930196Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679550121306974:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:31.930257Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:34.711765Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808474733, txId: 281474976715673] shutting down |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] |96.1%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson >> KqpExplain::UpdateOnSecondary-UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> KqpExplain::LimitOffset >> KqpParams::EmptyListForListParameterExecuteDataQuery [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::UpdateThenDelete-UseSink [GOOD] >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpStats::JoinNoStatsYql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 12914, MsgBus: 31831 2026-01-07T17:54:10.094220Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679481079761514:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.094273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.538218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.538334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.540103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.653687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.718911Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.745069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.942330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.942355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.942361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.942432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.107154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.953432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.962092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:13.662500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493964664248:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.662643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.663185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493964664258:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.663261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.129762Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679498259631570:2508] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2026-01-07T17:54:14.167284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498259631578:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.167371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.167769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498259631581:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.167867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.185860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.325079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498259631676:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.325153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.326881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498259631679:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.326949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.362933Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679498259631690:2579] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } Trying to start YDB, gRPC: 18935, MsgBus: 23073 2026-01-07T17:54:15.182615Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679501570506228:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.182667Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:15.203699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:15.321120Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:15.333082Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679501570506124:2081] 1767808455176505 != 1767808455176508 2026-01-07T17:54:15.347080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:15.355501Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:15.355585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:15.361299Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:15.419294Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:15.419317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:15.419323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:15.419406Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:15.589154Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:15.718348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:15.723668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:16.204438Z node 2 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:18.249421Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679514455408894:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.249526Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.249847Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679514455408904:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.249935Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: E ... } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.531911Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.532314Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.532761Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.533204Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.534023Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.534126Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.534933Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.534994Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037957;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.535791Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.535824Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.536694Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.537009Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.537675Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } 
NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.537991Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.538535Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.538888Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.539392Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.540238Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.540282Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.541489Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-07T17:54:36.542067Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] Test command err: 2026-01-07T17:52:33.569406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:33.569492Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DdlInDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 14634, MsgBus: 62133 2026-01-07T17:54:10.112808Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479783773960:2237];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.113109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.446521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.483952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.484117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.498488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.619419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.640521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2026-01-07T17:54:10.945627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.113447Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.756315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:13.534356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492668676519:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.534491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.534937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492668676528:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.534992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.537587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492668676533:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.548011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:13.560315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679492668676535:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:54:13.670787Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679492668676586:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:14.136308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.605938Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZDEzOGY2YTMtNGY4ZjgwM2QtYzAxYzk5YjgtOTY5ZTQ0Yg==, ActorId: [1:7592679496963643995:2341], ActorState: ExecuteState, LegacyTraceId: 01kecsgkkd4xe7rngpngspx3ak, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Decimal value for precision:  status# BAD_REQUEST issues# trace_id# Trying to start YDB, gRPC: 18546, MsgBus: 65058 2026-01-07T17:54:15.615898Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679501249315541:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.616797Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:15.637030Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:15.722223Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679501249315397:2081] 1767808455609499 != 1767808455609502 2026-01-07T17:54:15.735785Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:15.752901Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:15.753133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:15.757974Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:15.830888Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:15.830910Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:15.830914Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:15.830982Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:15.914989Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-07T17:54:16.237187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:16.621995Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:18.992554Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679514134218169:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.992559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679514134218180:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.992630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.995751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:18.997142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679514134218186:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:18.997249Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:19.007672Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679514134218185:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:54:19.064477Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679518429185534:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:19.094281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:19. ... at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.912207Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.955172Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:33.021436Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.827910Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679586660685036:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.828036Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.832430Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679586660685052:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.832658Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.833531Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679586660685120:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.833597Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.858942Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.892697Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.932384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.968948Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:36.009470Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:36.047807Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:36.090202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:36.143273Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:36.229018Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679590955653215:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:36.229103Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:36.229331Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679590955653220:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:36.229384Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679590955653221:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:36.229420Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:36.232430Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:36.242071Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679590955653224:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:36.305741Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679590955653275:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:36.938048Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679569480814006:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:36.938134Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:38.038705Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679599545588186:2539], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-07T17:54:38.039140Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=ZThhZTA5OTctZjU1ZjE0ZmYtMjQ5NjFkYTUtY2M0ZmEyZDM=, ActorId: [5:7592679599545588178:2534], ActorState: ExecuteState, LegacyTraceId: 01kecshat08kjpt7c9m2nxdyfy, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 5 column: 30 } message: "Operation \'CreateTable\' can\'t be performed in data query" end_position { row: 5 column: 30 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-07T17:54:38.068270Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679599545588199:2542], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-07T17:54:38.068581Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=ZThhZTA5OTctZjU1ZjE0ZmYtMjQ5NjFkYTUtY2M0ZmEyZDM=, ActorId: [5:7592679599545588178:2534], ActorState: ExecuteState, LegacyTraceId: 01kecshav228b6f29s4sefckbe, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 24 } message: "Operation \'DropTable\' can\'t be performed in data query" end_position { row: 2 column: 24 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-07T17:54:38.103673Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679599545588208:2546], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2026-01-07T17:54:38.104211Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=ZThhZTA5OTctZjU1ZjE0ZmYtMjQ5NjFkYTUtY2M0ZmEyZDM=, ActorId: [5:7592679599545588178:2534], ActorState: ExecuteState, LegacyTraceId: 01kecshaw3f3fw2ypd7hsd5cc7, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 54 } message: "Operation \'AlterTable\' can\'t be performed in data query" end_position { row: 2 column: 54 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13001, MsgBus: 22554 2026-01-07T17:54:10.093514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479377988885:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.093602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.439162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.485777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.485915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.562016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.679393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.704260Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.943889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.943917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.943922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.944015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.102003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.707699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.726012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.794160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.017588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.213510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.293474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.578840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492262892609:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.578965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.579235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492262892619:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.579308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.154858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.182591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.228668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.272534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.310601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.359254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.419430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.510104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496557860787:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496557860792:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496557860793:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.514059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.525684Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496557860796:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.615967Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496557860849:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.093279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479377988885:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.093330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.783918Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679505147795764:2543], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRI ... epts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:29.131869Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679540759327547:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:29.131939Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19541, MsgBus: 15495 2026-01-07T17:54:31.357798Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592679572758961591:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:31.358117Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:31.375659Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:31.471486Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:31.471572Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:31.471922Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:31.485252Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:31.558624Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:31.609514Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:31.609537Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:31.609544Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:31.609624Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:32.032078Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:32.051468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.129359Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.281593Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.342365Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.367204Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:34.905231Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585643865242:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.905332Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.905565Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585643865251:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.905612Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.994662Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.038313Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.070399Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.106380Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.142560Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.219638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.257285Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.317037Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.403824Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679589938833416:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.403911Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.404040Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679589938833421:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.404087Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679589938833422:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.404115Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.408271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:35.417949Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679589938833425:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:35.483880Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679589938833476:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:36.357208Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679572758961591:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:36.357312Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::MixedCreateAsSelect >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpExplain::UpdateConditional+UseSink >> KqpStats::StreamLookupStats+StreamLookupJoin [FAIL] >> KqpStats::StreamLookupStats-StreamLookupJoin >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard [GOOD] >> KqpQueryDiscard::DiscardSelectMultiLine >> KqpStats::CreateTableAsStats-IsOlap [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> KqpStats::RequestUnitForExecute [FAIL] >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> KqpQuery::QuerySkipHasNoColumns [GOOD] >> KqpQuery::QueryResultsTruncated >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 13263, MsgBus: 21045 2026-01-07T17:54:10.103499Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679478543448911:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.103573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.502102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.564116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.564234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.648693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.687822Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:54:10.719432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.943160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.943193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.943220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.943322Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.101766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.801234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.809770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.884121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.074720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.326561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.417180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.875190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491428352431:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.875328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.880119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491428352441:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.880229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.159905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.212388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.246149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.281220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.313259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.350511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.413453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.455767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.533067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495723320607:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.533178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.533431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495723320613:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.533463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495723320612:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.533510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.537064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.547679Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679495723320616:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.636538Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679495723320667:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.104639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679478543448911:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.104701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14918, MsgBus: 1497 2026-01-07T17:54:17.332558Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679509706006229:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:17.332603Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cp ... "Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":647,"Max":647,"Min":647,"History":[43,647]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":43,"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":12079,"CpuTimeUs":7157},"ProcessCpuTimeUs":1077,"TotalDurationUs":356879,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2026-01-07T17:54:40.556824Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:40.565724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) query_phases { duration_us: 48464 table_access { name: "/Root/.tmp/sessions/21360cff-4db7-7f62-080c-95ade7333cca/Root/Destination_d2558608-4b64-99ef-3e92-6e9f779e67f0" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 1270 affected_shards: 1 } compilation { duration_us: 12079 cpu_time_us: 7157 } process_cpu_time_us: 1077 query_plan: 
"{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, +\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}},{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[2,8]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":647,\"Max\":647,\"Min\":647,\"History\":[2,647]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":41,\"Max\":41,\"Min\":41,\"History\":[2,41]},\"Tasks\":1,\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"BaseTimeMs\":1767808480445,\"CpuTimeUs\":{\"Count\":1,\"Sum\":728,\"Max\":728,\"Min\":728,\"History\":[2,728]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[2,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[2,64]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1025,\"Max\":1025,\"Min\":1025,\"History\":[2,1025]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":1}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[43,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[43,28]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":631,\"Max\":631,\"Min\":631,\"History\":[43,631]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/21360cff-4db7-7f62-080c-95ade7333cca\\/Root\\/Destination_d2558608-4b64-99ef-3e92-6e9f779e67f0\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":41000,\"Max\":41000,\"Min\":41000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[43,1048576]},\"BaseTimeMs\":1767808480445,\"CpuTimeUs\":{\"Count\":1,\"Sum\":542,\"Max\":542,\"Min\":542,\"History\":[43,542]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":41000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[43,8]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[43,8]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":647,\"Max\":647,\"Min\":647,\"History\":[43,647]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":43,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":12079,\"CpuTimeUs\":7157},\"ProcessCpuTimeUs\":1077,\"TotalDurationUs\":356879,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable 
\'\"/Root/Source\" \'\"72057594046644480:42\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") \'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"48345233-4a2ea842-e809e4ce-f6399f05\") \'(\'\"_partition_mode\" \'\"single\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/21360cff-4db7-7f62-080c-95ade7333cca/Root/Destination_d2558608-4b64-99ef-3e92-6e9f779e67f0\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"e7530ea0-1cbeeb57-eea657ea-3597cb99\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 356879 total_cpu_time_us: 9504 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/21360cff-4db7-7f62-080c-95ade7333cca/Root/Destination_d2558608-4b64-99ef-3e92-6e9f779e67f0\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":47},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767808480\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"c3b8faaf-2b85e4a0-7c096bf9-28dd8636\",\"version\":\"1.0\"}" 2026-01-07T17:54:40.860595Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679585766837132:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:40.860675Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> 
KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 29513, MsgBus: 25873 2026-01-07T17:54:10.092861Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479848843946:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.092937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.383126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.448798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.448913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.588886Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.593205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.595248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.941944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.943857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.943894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.943902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.943996Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.110295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.773560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.793583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:13.608014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492733746691:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.608024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492733746683:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.608130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.608358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492733746698:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.608442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.611185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:13.620267Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679492733746697:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:54:13.754584Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679492733746750:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:14.137852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.840283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.092717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479848843946:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.092782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:15.169034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:15.177138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 29299, MsgBus: 17384 2026-01-07T17:54:16.083615Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679508288179378:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:16.083669Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:16.117078Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:16.193502Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:16.193579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:16.199656Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679508288179350:2081] 
1767808456082407 != 1767808456082410 2026-01-07T17:54:16.208724Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:16.209844Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:16.292364Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:16.292389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:16.292397Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:16.292472Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:16.329446Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:16.680270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:16.685585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:17.094142Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:19.294884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679521173082125:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:19.294969Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:19.295174Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679521173082134:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:19.295219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:19.298880Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TP ... node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715504Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037925 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715545Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715565Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037906 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715601Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037904 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715627Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037902 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715647Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037898 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715674Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037900 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715677Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715718Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.715721Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 2026-01-07T17:54:35.721383Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:35.731102Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, 
opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 27577, MsgBus: 2037 2026-01-07T17:54:37.298233Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679596713758243:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:37.300247Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:37.337686Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:37.449850Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:37.456963Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:37.457054Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:37.485634Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:37.528680Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:37.551405Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:37.551433Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:37.551442Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:37.551532Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:38.083881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:38.101684Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:38.118118Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:54:38.196027Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:54:38.313946Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:41.273041Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679613893628326:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.273156Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.273548Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679613893628335:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.273648Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.273998Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679613893628341:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.278979Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:41.288807Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679613893628343:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-07T17:54:41.376025Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679613893628397:2605] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:41.415469Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:41.685213Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:41.938997Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:41.945130Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:41.955785Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:42.297760Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679596713758243:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:42.297848Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> KqpQuery::QueryClientTimeout >> KqpExplain::Predicates [GOOD] >> KqpParams::InvalidJson [GOOD] >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpExplain::ExplainStream >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [FAIL] Test command err: Trying to 
start YDB, gRPC: 12997, MsgBus: 12513 2026-01-07T17:54:10.112611Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679478409286176:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.112844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.521037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.552039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.552150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.644670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.740957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.809015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945758Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.116013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.717966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.734970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.799790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.018258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.282925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.389780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.989164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491294189760:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.989300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.989668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491294189770:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.989720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.326656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.375429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.414975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.487739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.519234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.548802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.578639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.620860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.698317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495589157939:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.698386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.698456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495589157944:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.698600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495589157946:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.698638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.702747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.720974Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679495589157947:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:14.784428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679495589157999:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.106841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679478409286176:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.106905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.435693Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679504179092897:2538], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT ... 728Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:30.617503Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:30.617526Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:30.617533Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:30.617618Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:30.626519Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:31.151252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:31.176142Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.245861Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.420732Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:31.472628Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:31.553285Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.032611Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585156960284:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.032702Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.033216Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585156960294:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.033285Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.103570Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.140592Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.177679Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.209645Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.265553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.340665Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.378246Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.424795Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.496963Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585156961168:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.497041Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.497079Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585156961173:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.497344Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679585156961175:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.497423Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.500395Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:34.513490Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679585156961176:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:34.601714Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679585156961228:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:35.414747Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679567977089243:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:35.414834Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 263 Consumed units: 1 assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:383, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseRequestUnitForExecute::Execute_(NUnitTest::TTestContext &): (result.GetConsumedRu() > 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA2FAB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49BE8B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:383: Execute_ @ 0x1BB09C60 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB29307 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB29307 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB29307 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D4AF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D4AF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D4AF9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A2B67 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB285FE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A431F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CE95C 15. ??:0: ?? @ 0x7F92CA9DBD8F 16. ??:0: ?? @ 0x7F92CA9DBE3F 17. ??:0: ?? 
@ 0x18BC5028 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 29680, MsgBus: 1468 2026-01-07T17:54:10.092195Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479112157777:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.092452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.445223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.462832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.462959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.529787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.684392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.697000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.111600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.773517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.839421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.017500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.232608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.325310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.741052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491997061500:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.741174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.741429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491997061510:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.741476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.156602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.190496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.220858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.258805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.292708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.353956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.400818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.474934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496292029679:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.475009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.475070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496292029684:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.475120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496292029686:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.475162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.478386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.487716Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496292029688:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.555742Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496292029739:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.092389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479112157777:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.092443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connectio ... 600513523991:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:38.038853Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:38.060156Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:38.164365Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:38.191636Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:38.191743Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:38.193269Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679600513523963:2081] 1767808478038048 != 1767808478038051 2026-01-07T17:54:38.212435Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:38.297589Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:38.297618Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:38.297627Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:38.297715Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:38.331276Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:54:38.888596Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:38.898302Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:38.915657Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:38.992303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.123316Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:39.185762Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.281613Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.060241Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679617693395015:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.060322Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.060717Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679617693395024:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.060783Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.140861Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.175707Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.207540Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.243187Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.282605Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.367464Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.407410Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.466576Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.560301Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679617693395893:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.560434Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.560990Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679617693395898:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.561055Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679617693395899:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.561352Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.565845Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:42.577654Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679617693395902:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:42.673629Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679617693395953:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:43.040859Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679600513523991:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:43.041083Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:44.603254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 17995, MsgBus: 26057 2026-01-07T17:54:12.541235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679490333918319:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:12.541342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:12.576507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:12.864735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:12.864872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:12.883057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:12.937666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:12.958208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:12.965032Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679490333918181:2081] 1767808452513664 != 1767808452513667 2026-01-07T17:54:13.057183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:13.057212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:13.057222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-07T17:54:13.057322Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:13.153025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:13.450034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:13.519568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.569536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:13.660211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.800847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.856065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.564256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679503218821962:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:15.564386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:15.564811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679503218821972:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:15.564858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:15.854747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.884226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.915055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.951120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:15.986684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:16.021768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:16.056188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:16.121701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:16.213999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679507513790143:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.214088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679507513790148:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.214100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.214312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679507513790150:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.214387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.217761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:16.228250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679507513790151:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:16.298123Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679507513790203:3781] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:17.542332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679490333918319:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:17.542407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:17.713408Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id= ... ,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:38.928375Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:38.928605Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679602275777487:2081] 1767808478764757 != 1767808478764760 2026-01-07T17:54:38.968130Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:38.972294Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:39.057380Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:39.057406Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:39.057415Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:39.057506Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:39.527531Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:39.602960Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:39.610639Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:39.627611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.704198Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.820901Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:39.863803Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.940069Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.922325Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679619455648542:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.922441Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.922977Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679619455648552:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.923037Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.003405Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.045696Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.090198Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.139047Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.180894Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.245924Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.311124Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.371941Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.504832Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623750616725:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.504958Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.505426Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623750616731:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.505731Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623750616730:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.505777Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.509732Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:43.527966Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679623750616734:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:43.624489Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679623750616785:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:43.766935Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679602275777515:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:43.767009Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:45.518961Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:45.669999Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTgyOTA4OTgtZTUyZjY1NGYtMmJhMGFjYzgtYmUyYmQwMmQ=, ActorId: [5:7592679632340551699:2533], ActorState: ExecuteState, LegacyTraceId: 01kecshj6f8zkndaw7625r216h, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value status# BAD_REQUEST issues# trace_id#
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 64239, MsgBus: 21351 2026-01-07T17:54:10.099289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679480620367822:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.099826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.441698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.492100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.492201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.627917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.638543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.707575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945727Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.108668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.767851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.781622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.898743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.053059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.243414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.313353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.757496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493505271529:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.757627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.758177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493505271539:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.758239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.160535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.193409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.227624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.301466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.352124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.400566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.462028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.540437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497800239719:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.540556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.540744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497800239725:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.540750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497800239724:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.540793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.544625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.554557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497800239728:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:14.638930Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497800239779:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.099814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679480620367822:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.099871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15701, MsgBus: 27123 2026-01-07T17:54:17.363181Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:54:17.385022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137 ... p:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679570475559418:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:36.258154Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5827, MsgBus: 19186 2026-01-07T17:54:39.195313Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679606618675303:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:39.195641Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:39.273857Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:39.331846Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:39.337146Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679606618675183:2081] 1767808479158639 != 1767808479158642 2026-01-07T17:54:39.348406Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:39.348500Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:39.373766Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:39.447404Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:39.447430Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:39.447438Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-07T17:54:39.447531Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:39.453621Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:40.013186Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:40.029101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.109158Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.196040Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:40.306318Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.396480Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.197079Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623798546243:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.197202Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.197716Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623798546253:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.197777Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.278330Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.323507Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.370730Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.419097Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.475482Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.522042Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.564095Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.615638Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.714035Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623798547123:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.714124Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.714552Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623798547128:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.714595Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679623798547129:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.714888Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.719575Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:43.733720Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679623798547132:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:43.808311Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679623798547183:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:44.173083Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679606618675303:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:44.173149Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::StreamLookupStats-StreamLookupJoin [FAIL] >> KqpStats::SelfJoin >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink >> KqpExplain::UpdateConditional+UseSink [GOOD] >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] Test command err: 2026-01-07T17:52:45.062809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:52:45.062874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> KqpStats::MultiTxStatsFullExpYql |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal |96.1%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 4504, MsgBus: 29154 2026-01-07T17:54:23.123257Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679535246735115:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:23.126007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:23.147935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:23.357083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:23.359282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:23.359378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:23.390848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:23.453693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:23.454502Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679535246735089:2081] 1767808463121933 != 1767808463121936 2026-01-07T17:54:23.541981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:23.542005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:23.542018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:23.542095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:23.620999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:23.918320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:23.923707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:23.972894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.128454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.134869Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:24.272542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:24.335616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.164566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679548131638843:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.164719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.165214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679548131638853:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.165330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.489180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.519653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.556365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.590167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.632463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.669004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.731752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.773748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.857298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679548131639727:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.857408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.857750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679548131639732:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.857812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679548131639733:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.857860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:26.861193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:26.871701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679548131639736:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:26.946254Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679548131639787:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:28.124874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679535246735115:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:28.125071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... DATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6434, MsgBus: 12961 2026-01-07T17:54:42.348910Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592679619833939979:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:42.348993Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:42.369023Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:42.466232Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:42.471226Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592679619833939874:2081] 1767808482339508 != 1767808482339511 2026-01-07T17:54:42.494580Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:42.494664Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:42.505712Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:42.544261Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:42.586558Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:42.586582Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:42.586590Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:42.586668Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:42.995616Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:43.014952Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:43.030897Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.091969Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.232115Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.304440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.369273Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:45.888861Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679632718843623:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.888982Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.889630Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679632718843633:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.889694Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.998933Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.036069Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.073362Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.108509Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.141226Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.179655Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.216996Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.270303Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.362683Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679637013811799:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.362809Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.362896Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679637013811804:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.363044Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679637013811806:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.363096Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.371144Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:46.391130Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679637013811807:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:46.471883Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679637013811859:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:47.348572Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679619833939979:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:47.348654Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25553, MsgBus: 17852 2026-01-07T17:54:10.620780Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479531510899:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.641244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.881911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.896233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.896352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.972059Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.976418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:11.079659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:11.097578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:11.097618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:11.097647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:11.097766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.641936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.773792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.781652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.831235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.017539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.211924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.308792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.969228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492416414600:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.969315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.969599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492416414610:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.969657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.222363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.251837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.283355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.315547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.348860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.380341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.416830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.464463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.535709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496711382771:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.535791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.535998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496711382776:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.536043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496711382777:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.536097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.539241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.548398Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496711382780:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:14.605730Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496711382831:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.623822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479531510899:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.623887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"Eigh ... 01-07T17:54:41.963725Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:41.985726Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:42.049888Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:42.049966Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:42.049974Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:42.050071Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:42.241867Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:42.547820Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:42.555567Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:42.565825Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.646981Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.815941Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.875092Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:42.948310Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:45.983031Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679631630549750:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.983139Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.983419Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679631630549759:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.983481Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.059479Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.093742Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.138140Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.176806Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.212587Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.249107Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.340288Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.391259Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.484141Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679635925517928:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.484225Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.484320Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679635925517933:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.486599Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679635925517935:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.486691Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.488392Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:46.500600Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679635925517936:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:46.592369Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679635925517988:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:46.847634Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679614450678826:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:46.847705Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Key (-∞, +∞)","Data"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::StreamWrite+Allowed >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> Secret::Validation [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpLimits::ManyPartitionsSorting [GOOD] >> TSubscriberSinglePathUpdateTest::OneRingGroup >> TSubscriberSinglePathUpdateTest::TwoRingGroups >> TSubscriberTest::StrongNotificationAfterCommit >> KqpQuery::Pure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22029, MsgBus: 20476 2026-01-07T17:54:10.094201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679480091642671:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.094240Z 
node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.457086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.463814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.463937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.635859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.655451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.692792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.941592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.941624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.941630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.941696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.105602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.710093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.735186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.795066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.022322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.217170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.319962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.838181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492976546397:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.838290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.838659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492976546407:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.838722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.142123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.172257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.204614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.238525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.292835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.327060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.361215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.404201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.488293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497271514570:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.488405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.488909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497271514575:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.488993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497271514576:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.489151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.493489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.503354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497271514579:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.597873Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497271514630:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.094406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679480091642671:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.094486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14594, MsgBus: 19968 2026-01-07T17:54:17.069046Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679511220246952:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:17.069091Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.c ... ble.cpp:689) 2026-01-07T17:54:47.778638Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.815658Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.851775Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.892390Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.929906Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.982080Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.079364Z node 5 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679643469806904:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.079477Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.079774Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679643469806909:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.079820Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679643469806910:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.079942Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.084242Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:48.097458Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679643469806913:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:48.163090Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679643469806964:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:48.167288Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679621994967796:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:48.167346Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:49.985046Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.290174Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679656354709371:2582], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2026-01-07T17:54:51.292849Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=ZGEzMGI4MGEtYmI2MGQ0ODgtZjhjNmZlNzgtODZkZWViN2U=, ActorId: [5:7592679656354709369:2581], ActorState: ExecuteState, LegacyTraceId: 01kecshqr6515g8y6vaqpde98k, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 17 } message: "At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda" end_position { row: 4 column: 17 } severity: 1 issues { position { row: 3 column: 25 } message: "At function: Parameter, At function: DataType" end_position { row: 3 column: 25 } severity: 1 issues { position { row: 3 column: 25 } message: "Invalid decimal precision: 99" end_position { row: 3 column: 25 } severity: 1 } } } } tx_id# trace_id# 2026-01-07T17:54:51.458998Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=OWEzYzRlYTYtYmRmZDFmMzQtNzNiN2EzZGUtODgzNTMzNjA=, ActorId: [5:7592679656354709375:2584], ActorState: ExecuteState, LegacyTraceId: 01kecshqsa84eg4n9vkdgxdj27, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1555: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 status# BAD_REQUEST issues# trace_id# 2026-01-07T17:54:51.504178Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679656354709390:2590], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-07T17:54:51.504686Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YzlmYzBlNGEtYTFhNzkxYzItNGE3Y2MyNi03NTVmZGNlZQ==, ActorId: [5:7592679656354709388:2589], ActorState: ExecuteState, LegacyTraceId: 01kecshqyn25nxarfw52tc3p98, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 7 column: 29 } message: "At function: KiWriteTable!" end_position { row: 7 column: 29 } severity: 1 issues { position { row: 7 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 7 column: 50 } severity: 1 issues { position { row: 4 column: 25 } message: "Implicit decimal cast would lose precision" end_position { row: 4 column: 25 } severity: 1 } issues { position { row: 7 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 7 column: 50 } severity: 1 } } issues { position { row: 7 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 7 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:54:51.541949Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7592679656354709401:2595], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-07T17:54:51.542810Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=MmYyMjExODEtYWEzZDI1ZWMtMzhjMjA4MDktYjgwZTYyMw==, ActorId: [5:7592679656354709399:2594], ActorState: ExecuteState, LegacyTraceId: 01kecshqzy3s0n5tw4wzdqr3dx, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 3 column: 50 } severity: 1 issues { position { column: 14 } message: "Implicit decimal cast would lose precision" end_position { column: 14 } severity: 1 } issues { position { row: 3 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 3 column: 50 } severity: 1 } } issues { position { row: 3 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSubscriberSinglePathUpdateTest::OneRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup >> TSubscriberSinglePathUpdateTest::TwoRingGroups [GOOD] >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:52:01.313470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:52:01.475632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:52:01.509615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:52:01.510119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:52:01.510162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:52:01.948692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:01.948829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:02.042649Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808319354696 != 1767808319354700 2026-01-07T17:52:02.060901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:52:02.109563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:52:02.236329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:52:03.479699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:52:03.480869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:52:03.480918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:52:03.481029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:52:03.481265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:52:03.494518Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:52:15.496087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:15.496273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:15.496742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:15.496819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-07T17:52:25.993760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:984:2839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:25.993927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.007397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:989:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.010435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.025451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:26.202626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1096:2918], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.202818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.203244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1101:2923], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.203466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1102:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.203724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:52:26.213390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:52:26.344555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1105:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:52:27.060235Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1197:2992] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:52:27.763731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:52:28.136673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:28.656426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:29.330999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:52:29.773921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:52:31.106496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:52:31.404249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-07T17:52:31.463236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:52:31.463299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE S ... ain>: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:53:07.591685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:08.511434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:10.411856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715725:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-07T17:53:10.877288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715728:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-07T17:53:36.089816Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=16; 2026-01-07T17:53:36.090196Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:53:36.090473Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-07T17:53:36.091008Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3664:4841], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3576:4841]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3664:4841].{
: Error: Conflict with existing key., code: 2012 } 2026-01-07T17:53:36.091724Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3658:4841], SessionActorId: [1:3576:4841], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3576:4841]. 2026-01-07T17:53:36.093675Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MTMwMGRlYmQtOTUzZjhiOTUtYmFhYmFhZWQtNTBmYTJkYQ==, ActorId: [1:3576:4841], ActorState: ExecuteState, LegacyTraceId: 01kecsfe7ebk0xnmt6qmm47ee8, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3659:4841] from: [1:3658:4841] trace_id# 2026-01-07T17:53:36.094074Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3659:4841] TxId: 281474976715755. Ctx: { TraceId: 01kecsfe7ebk0xnmt6qmm47ee8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwMGRlYmQtOTUzZjhiOTUtYmFhYmFhZWQtNTBmYTJkYQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-07T17:53:36.094575Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTMwMGRlYmQtOTUzZjhiOTUtYmFhYmFhZWQtNTBmYTJkYQ==, ActorId: [1:3576:4841], ActorState: ExecuteState, LegacyTraceId: 01kecsfe7ebk0xnmt6qmm47ee8, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-07T17:53:36.108289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kecsfdzecd2bd7y9yxd84qv4" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MTMwMGRlYmQtOTUzZjhiOTUtYmFhYmFhZWQtNTBmYTJkYQ==" tx_control { tx_id: "01kecsfdzecd2bd7y9yxd84qv4" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-07T17:53:59.647594Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4134:5281], for# root@builtin, access# DescribeSchema 2026-01-07T17:53:59.647731Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4134:5281], for# root@builtin, access# DescribeSchema 2026-01-07T17:53:59.649456Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4131:5278], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:53:59.651520Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjViNzg5YzctZGRmMjY3MGUtMjVhNDA5OWYtODFjZWFjNWU=, ActorId: [1:4124:5272], ActorState: ExecuteState, LegacyTraceId: 01kecsg5ahcy5takghev049g2s, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-07T17:54:11.173271Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync >> TSubscriberSyncQuorumTest::TwoRingGroups >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DeleteWhereInSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] Test command err: Trying to start YDB, gRPC: 1253, MsgBus: 28723 2026-01-07T17:54:10.092071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479931427344:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.092134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.567474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.599788Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.648846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.648968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.661960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.826728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.942923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.942950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.942955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.943008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.105103Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.775152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.782135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.851503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.063815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.294612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.406428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.902008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492816331075:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.902171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.902807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492816331085:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.902885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.186829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.229674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.272289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.304115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.341130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.384844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.426365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.481234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.547874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497111299255:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.547960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.548025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497111299260:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.548146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497111299262:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.548194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.550912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.559745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497111299264:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.629931Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497111299315:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.092248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479931427344:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.092310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:18.020912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679509996201704:2598], status: GENERIC_ERROR, issues:
:1:16: Warning: DISCARD can only be used at the top level, not inside subqueries, code: 4541
: Error: DISCARD can only be used at the top level, not inside subqueries, code: 2008 2026-0 ... VE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:42.280443Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:42.282588Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:42.333255Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:42.351160Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:42.351324Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:42.351331Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:42.351423Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:42.948796Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:42.964420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:42.987767Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.065613Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.203112Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:43.280201Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.381409Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.057692Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679634185890639:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.057794Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.058185Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679634185890649:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.058241Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.133900Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.176836Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.210626Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.280888Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.314386Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.353077Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.388735Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.443447Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.531597Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679634185891518:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.531672Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.531730Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679634185891523:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.532014Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679634185891526:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.532061Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.536719Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:46.547742Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679634185891525:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:46.636288Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679634185891578:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:47.130094Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679617006019617:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:47.130156Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:52.661859Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7592679659955695998:2656], status: GENERIC_ERROR, issues:
:1:0: Error: mismatched input 'sub' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES} 2026-01-07T17:54:52.664107Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ODc2OWFhOTMtZGY5NjNlYmQtMWJiZjExMDgtNmE3MDA4Mg==, ActorId: [4:7592679659955695996:2655], ActorState: ExecuteState, LegacyTraceId: 01kecshs374494676ramnzfmj2, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 } message: "mismatched input \'sub\' expecting {\';\', \'(\', \'$\', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES}" end_position { row: 1 } severity: 1 } tx_id# trace_id# |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> TSubscriberSyncQuorumTest::TwoRingGroups [GOOD] >> TSubscriberTest::Boot >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::Boot [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-07T17:54:54.612841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-07T17:54:54.619175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-07T17:54:54.619315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-07T17:54:54.619366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-07T17:54:54.619408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-07T17:54:54.619457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-07T17:54:54.619499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:54:54.619929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-07T17:54:54.621379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-07T17:54:54.621451Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-07T17:54:54.621502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-07T17:54:54.621545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-07T17:54:54.621655Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.621744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-07T17:54:54.621806Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 
2026-01-07T17:54:54.626990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-07T17:54:54.627117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:29:2075] 2026-01-07T17:54:54.627192Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Update to strong state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-07T17:54:54.629165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-07T17:54:54.629245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:30:2075] 2026-01-07T17:54:54.629322Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-07T17:54:54.629582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-01-07T17:54:54.629640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:31:2075] 2026-01-07T17:54:54.629685Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:3298559222387:0] 2026-01-07T17:54:54.629905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:13:2060] 2026-01-07T17:54:54.629964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:32:2075] 2026-01-07T17:54:54.630058Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new 
version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:4398070850163:0] 2026-01-07T17:54:54.630283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:14:2061] 2026-01-07T17:54:54.630362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:33:2075] 2026-01-07T17:54:54.630410Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:5497582477939:0] 2026-01-07T17:54:54.630691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:15:2062] 2026-01-07T17:54:54.630771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:34:2075] 2026-01-07T17:54:54.630816Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 6) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-07T17:54:54.887675Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:54.888165Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [2:23:2066] 2026-01-07T17:54:54.888227Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:4:2051] Upsert description: path# TestPath 2026-01-0 ... 
Update { Owner: 1 Generation: 1 }: sender# [2:18:2065], cookie# 0, event size# 80 2026-01-07T17:54:54.902577Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:4:2051] Update description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-07T17:54:54.902638Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:4:2051] Upsert description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path TestPath, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 34} 2026-01-07T17:54:54.902791Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:4:2051] 2026-01-07T17:54:54.910361Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:520: [proxy][2:20:2066][TestPath] Cluster state mismatch in replica notification: sender# [2:23:2066], subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-07T17:54:54.910542Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:23:2066] 2026-01-07T17:54:54.910707Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:4:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: TestPath }: sender# [2:23:2066] 2026-01-07T17:54:54.912134Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:4:2051] Unsubscribe: subscriber# [2:23:2066], path# TestPath 2026-01-07T17:54:54.912312Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:20:2066] 2026-01-07T17:54:54.912411Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:19:2066][TestPath] Ignore empty state: owner# [2:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-07T17:54:55.174621Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:55.175354Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-07T17:54:55.175436Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-07T17:54:55.175477Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-07T17:54:55.175769Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-07T17:54:55.175934Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-07T17:54:55.176024Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-07T17:54:55.176115Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.176189Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-07T17:54:55.176235Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.176351Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-07T17:54:55.176446Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-07T17:54:55.176501Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-07T17:54:55.176592Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-07T17:54:55.176644Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-07T17:54:55.176807Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-07T17:54:55.176855Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:54:55.176907Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-01-07T17:54:55.176962Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:54:55.177028Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-07T17:54:55.177057Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-07T17:54:55.177105Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-07T17:54:55.177154Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-07T17:54:55.177274Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-07T17:54:55.177458Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-07T17:54:55.177521Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-07T17:54:55.177581Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-07T17:54:55.177626Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:54:55.177719Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-07T17:54:55.177792Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-01-07T17:54:55.177831Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-07T17:54:55.177881Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-07T17:54:55.177970Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-07T17:54:55.178032Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.178086Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-07T17:54:55.178121Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: Trying to start YDB, gRPC: 32041, MsgBus: 27820 2026-01-07T17:54:10.091712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479319207372:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.091766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.448601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.463797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.463905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.485133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.589882Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.791919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-07T17:54:11.113879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.834920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.899759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.074207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.286454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.374840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.690059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492204111089:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.690222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.690753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492204111099:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.690828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.159212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.190914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.222514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.254021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.312574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.349193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.385561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.467092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496499079264:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.467167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.467436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496499079270:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.467468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.467504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496499079269:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.470806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.480829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496499079273:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.579942Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496499079324:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.091833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479319207372:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.091900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.059395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:19.238734Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7592679517973916909:2645] TxId: 281474976710674. Ctx: { TraceId: 01kecsgr9x0x1wh1rw2 ... ons } 2026-01-07T17:54:52.883261Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679662862508553:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:52.883311Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:52.887670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:52.899782Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679662862508554:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:54:52.979342Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679662862508608:5436] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 72036 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 11905 affected_shards: 100 } compilation { duration_us: 146360 cpu_time_us: 138802 } process_cpu_time_us: 302 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}},{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":174,\"Max\":59,\"Min\":31},\"Chunks\":{\"Count\":4,\"Sum\":4,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"FirstMessageMs\":{\"Count\":4,\"Sum\":174,\"Max\":59,\"Min\":31},\"ActiveMessageMs\":{\"Count\":4,\"Max\":59,\"Min\":31},\"Bytes\":{\"Count\":4,\"Sum\":7600,\"Max\":1933,\"Min\":1862,\"History\":[33,1933,42,3866,44,5728,60,7600]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":57693,\"Max\":57693,\"Min\":57693,\"History\":[60,57693]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[0,3145728,2,4194304,60,4194304]},\"DurationUs\":{\"Count\":4,\"Sum\":158000,\"Max\":56000,\"Min\":28000},\"Introspections\":[\"4 tasks from DSScanMinimalThreads 
setting\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":110,\"Max\":110,\"Min\":110,\"History\":[50,110,59,110]},\"Tasks\":4,\"FinishedTasks\":4,\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"StageDurationUs\":56000,\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"BaseTimeMs\":1767808493134,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":152238,\"Max\":54352,\"Min\":26754,\"History\":[21,31531,22,47623,33,58105,42,77614,43,97372,44,97886,50,144156,59,152238]},\"CpuTimeUs\":{\"Count\":4,\"Sum\":7330,\"Max\":2227,\"Min\":1521,\"History\":[0,1107,2,1177,21,2922,22,3533,33,3964,42,4837,43,5590,44,5677,50,6118,59,7330]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":174,\"Max\":59,\"Min\":31},\"ActiveMessageMs\":{\"Count\":4,\"Max\":59,\"Min\":3},\"FirstMessageMs\":{\"Count\":4,\"Sum\":16,\"Max\":5,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[21,9504,22,13024,33,15840,42,21120,43,26048,44,26368,50,28512,60,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":158,\"Max\":56,\"Min\":28}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":174,\"Max\":59,\"Min\":31},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":174,\"Max\":59,\"Min\":31},\"FirstMessageMs\":{\"Count\":4,\"Sum\":16,\"Max\":5,\"Min\":3},\"ActiveMessageMs\":{\"Count\":4,\"Max\":59,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[21,9504,22,13024,33,15840,42,21120,43,26048,44,26368,50,28512,59,35200]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":158,\"Max\":56,\"Min\":28},\"WaitTimeUs\":{\"Count\":4,\"Sum\":173915,\"Max\":59457,\"Min\":31480,\"History\":[21,40678,22,61300,33,72383,42,93034,43,113829,44,114458,50,164073,60,173915]},\"WaitPeriods\":{\"Count\":4,\"Sum\":5,\"Max\":2,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":1}}}],\"UpdateTimeMs\":59}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"FirstMessageMs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"Bytes\":{\"Count\":1,\"Sum\":7576,\"Max\":7576,\"Min\":7576,\"History\":[66,7576]}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,66,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":33000,\"Max\":33000,\"Min\":33000},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"InputBytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600},\"Tasks\":1,\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":33000,\"BaseTimeMs\":1767808493134,\"CpuTimeUs\":{\"Count\":1,\"Sum\":4575,\"Max\":4575,\"Min\":4575,\"History\":[2,487,66,4575]},\"UpdateTimeMs\":64,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":61,\"Max\":61,\"Min\":61},\"ActiveMessageMs\":{\"Count\":1,\"Max\":61,\"Min\":60},\"FirstMessageMs\":{\"Count\":1,\"Sum\":60,\"Max\":60,\"Min\":60},\"Bytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600,\"History\":[66,7600]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"Chunks\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"FirstMessageMs\":{\"Count\":1,\"Sum\":31,\"Max\":31,\"Min\":31},\"ActiveMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":31},\"Bytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600,\"History\":[66,7600]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28},\"WaitTimeUs\":{\"Count\":1,\"Sum\":14423,\"Max\":14423,\"Min\":14423,\"History\":[66,14423]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":2}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":146360,\"CpuTimeUs\":138802},\"ProcessCpuTimeUs\":302,\"TotalDurationUs\":326483,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":98500},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:38\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'368) \'(\'\"_id\" \'\"3e02bbee-d6e2d7d0-9591b19d-29d08889\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'380) \'(\'\"_id\" \'\"897622b5-d6be6978-3d002f03-ee47c688\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return 
(KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 326483 total_cpu_time_us: 151009 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":38},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767808493\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"86ae2e33-a97de7a3-ba15c9c6-5bd2cbb3\",\"version\":\"1.0\"}" |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-07T17:54:54.613790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:54.619561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-07T17:54:54.619720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-07T17:54:54.619807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:54:54.621910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-07T17:54:54.622032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-07T17:54:54.622088Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.622162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-07T17:54:54.622209Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-01-07T17:54:54.627734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-07T17:54:54.627835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:20:2066] 2026-01-07T17:54:54.627924Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Update to strong state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-07T17:54:54.629475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-07T17:54:54.629555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:21:2066] 2026-01-07T17:54:54.629665Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-07T17:54:54.629918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-01-07T17:54:54.629975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:22:2066] 2026-01-07T17:54:54.630038Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-07T17:54:54.878855Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:54.879555Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-07T17:54:54.879633Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-07T17:54:54.879685Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:54:54.880001Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:29:2075] 2026-01-07T17:54:54.880089Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2026-01-07T17:54:54.880142Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:28:2075][TestPath] Set up state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.880193Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:31:2075] 2026-01-07T17:54:54.880235Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:24339059:0] 2026-01-07T17:54:54.880574Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:4:2051] 2026-01-07T17:54:54.880649Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:29:2075] 2026-01-07T17:54:54.880708Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Update to strong state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:1099535966835:0] 2026-01-07T17:54:54.881025Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:5:2052] 2026-01-07T17:54:54.881094Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:30:2075] 2026-01-07T17:54:54.881154Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:2199047594611:0] 2026-01-07T17:54:54.881419Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [2:6:2053] 2026-01-07T17:54:54.881480Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:31:2075] 2026-01-07T17:54:54.881528Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:3298559222387:0] Sending path update to replica: [2:4398070850163:0] Sending path update to replica: [2:5497582477939:0] ... waiting for initial path lookups 2026-01-07T17:54:55.151676Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-07T17:54:55.152234Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-07T17:54:55.152285Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-07T17:54:55.152321Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-07T17:54:55.152540Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-07T17:54:55.152631Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-07T17:54:55.152706Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.152759Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-07T17:54:55.152792Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:24339059:0] 2026-01-07T17:54:55.153101Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:4:2051] 2026-01-07T17:54:55.153179Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:29:2075] 2026-01-07T17:54:55.153240Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Update to strong state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:1099535966835:0] 2026-01-07T17:54:55.153529Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:5:2052] 2026-01-07T17:54:55.153588Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:30:2075] 2026-01-07T17:54:55.153650Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:2199047594611:0] 2026-01-07T17:54:55.153822Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [3:6:2053] 2026-01-07T17:54:55.153865Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [3:31:2075] 2026-01-07T17:54:55.153904Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:3298559222387:0] Sending path update to replica: [3:4398070850163:0] Sending path update to replica: [3:5497582477939:0] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 4546, MsgBus: 6194 2026-01-07T17:54:13.694621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679491378941153:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:13.694922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:13.937030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:13.967522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:13.967693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:13.989856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:14.034847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:14.040074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679491378941048:2081] 1767808453690372 != 1767808453690375 2026-01-07T17:54:14.081565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:14.081594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:14.081612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:14.081703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:14.182011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:14.555117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:14.605338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.709959Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:14.719026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.849741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.906257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:16.710309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679504263844810:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.710444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.710798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679504263844820:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:16.710862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.154185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.193481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.228265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.261717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.294678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.353055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.394036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.441545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:17.518240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679508558812990:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.518294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.518523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679508558812995:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.518547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679508558812996:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.518579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:17.522924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:17.534724Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679508558812999:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:17.642565Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679508558813050:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:18.694981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679491378941153:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:18.695051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3704, MsgBus: 14252 2026-01-07T17:54:20.109135Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679524742600536:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:20.109190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snap ... t4nz, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 12 column: 30 } message: "At function: KiWriteTable!" end_position { row: 12 column: 30 } severity: 1 issues { position { row: 12 column: 30 } message: "Cannot find table \'db.[/Root/Test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 12 column: 30 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 18798, MsgBus: 18625 2026-01-07T17:54:46.782828Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679634741234984:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:46.782916Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:46.798643Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:46.893156Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:46.895065Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:46.895163Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:46.901090Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:46.972334Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:46.974686Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:46.974708Z node 5 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:46.974717Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:46.974814Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:47.434020Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:47.447818Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.527565Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.689151Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.769011Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.939425Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:50.830168Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679651921106006:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.830249Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.830472Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679651921106015:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.830508Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.921999Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.002008Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.047964Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.089090Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.132098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.174582Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.213365Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.290207Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.391430Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679656216074190:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.391519Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.391568Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679656216074195:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.391731Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679656216074197:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.391788Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.396673Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:51.412413Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679656216074199:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:51.511755Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679656216074250:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:51.783236Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679634741234984:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:51.783322Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2026-01-07T17:54:54.612949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:54.619841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-07T17:54:54.619967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-07T17:54:54.620011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-07T17:54:54.620081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-07T17:54:54.621514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-07T17:54:54.621664Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.621754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-07T17:54:54.621870Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.622340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-07T17:54:54.622440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-07T17:54:54.622499Z node 
1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:54.622672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-07T17:54:54.622768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-07T17:54:54.622827Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.080635Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:55.081266Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-07T17:54:55.081331Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:6:2053] 2026-01-07T17:54:55.081370Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:9:2056] 2026-01-07T17:54:55.081431Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:38:2067] 2026-01-07T17:54:55.081482Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:39:2067] 2026-01-07T17:54:55.081533Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.081665Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:40:2067] 2026-01-07T17:54:55.081721Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.081823Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: 
[main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-07T17:54:55.083898Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-07T17:54:55.084004Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-07T17:54:55.084046Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-07T17:54:55.084183Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-07T17:54:55.084230Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-07T17:54:55.084259Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-07T17:54:55.084314Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-07T17:54:55.084354Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:54:55.084392Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-07T17:54:55.084432Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:54:55.084499Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-07T17:54:55.084526Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> KqpStats::SelfJoin [GOOD] >> KqpQuery::MixedCreateAsSelect [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: ... 
waiting for initial path lookups 2026-01-07T17:54:55.096786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-07T17:54:55.099102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-07T17:54:55.099184Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-07T17:54:55.099211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-07T17:54:55.099234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-07T17:54:55.099261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-07T17:54:55.099288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] Poisoning replica: [1:5497582477939:0] 2026-01-07T17:54:55.099575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12345 2026-01-07T17:54:55.099641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-07T17:54:55.100910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-07T17:54:55.100971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-07T17:54:55.101005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-07T17:54:55.101047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-07T17:54:55.101123Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.101179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-07T17:54:55.101223Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.101315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12345 2026-01-07T17:54:55.101369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12345 2026-01-07T17:54:55.101394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:31:2075], cookie# 12345 2026-01-07T17:54:55.101452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12345 2026-01-07T17:54:55.101481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12345 2026-01-07T17:54:55.101531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:34:2075], 
cookie# 12345 2026-01-07T17:54:55.101639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-07T17:54:55.101686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-07T17:54:55.101769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12345 2026-01-07T17:54:55.101815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12345 2026-01-07T17:54:55.101984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12345 2026-01-07T17:54:55.102036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:54:55.102080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12345 2026-01-07T17:54:55.102127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:54:55.102162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-07T17:54:55.102265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12345 2026-01-07T17:54:55.102303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:54:55.102343Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-07T17:54:55.102394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-07T17:54:55.102440Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.102481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12345 2026-01-07T17:54:55.102524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:54:55.102553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 1, failures# 0 2026-01-07T17:54:55.102601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12345 2026-01-07T17:54:55.102628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-07T17:54:55.102685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-07T17:54:55.102770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12345 2026-01-07T17:54:55.102798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:28:2075][TestPath] Sync cookie mismatch: sender# [1:34:2075], cookie# 12345, current cookie# 0 2026-01-07T17:54:55.102899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-07T17:54:55.102948Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:1099535966835:0] whose ring group state is: 0 2026-01-07T17:54:55.103080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12346 2026-01-07T17:54:55.103287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12346 2026-01-07T17:54:55.103370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12346 2026-01-07T17:54:55.103449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12346 2026-01-07T17:54:55.103499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:54:55.103543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12346 2026-01-07T17:54:55.103601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12346 2026-01-07T17:54:55.103658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12346 2026-01-07T17:54:55.103693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:54:55.103747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2026-01-07T17:54:55.103871Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12346 2026-01-07T17:54:55.103906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12346 2026-01-07T17:54:55.103980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12346 2026-01-07T17:54:55.104010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-07T17:54:55.104042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:30:2075], cookie# 12346 2026-01-07T17:54:55.104101Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-07T17:54:55.104146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-07T17:54:55.104187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-07T17:54:55.104246Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.104338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12346 2026-01-07T17:54:55.104393Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: 
[main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-07T17:54:55.104424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-01-07T17:54:55.104471Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12346 2026-01-07T17:54:55.104502Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-07T17:54:55.104527Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-01-07T17:54:55.104558Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-07T17:54:55.370734Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-07T17:54:55.371408Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:3:2050] 2026-01-07T17:54:55.371511Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:6:2053] 2026-01-07T17:54:55.371555Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:9:2056] 2026-01-07T17:54:55.371608Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2066] 2026-01-07T17:54:55.371672Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:38:2066] 2026-01-07T17:54:55.371720Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:36:2066][path] Set up state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-07T17:54:55.371768Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:39:2066] 2026-01-07T17:54:55.371808Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:36:2066][path] Ignore empty state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpQuery::QueryStats+UseSink [GOOD] >> 
KqpQuery::QueryStats-UseSink >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan |96.2%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::Chain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 10659, MsgBus: 28335 2026-01-07T17:54:31.554124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679569523398362:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:31.554201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:31.766274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:31.766411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:31.849798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:31.862436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:31.865066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679569523398134:2081] 1767808471529846 != 1767808471529849 2026-01-07T17:54:31.870484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:32.001724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:32.001746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:32.001753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:32.001847Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:32.070670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:32.420747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:32.479436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.541228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:32.597511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.761984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:32.820239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.556691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679582408301897:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.556791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.557275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679582408301907:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.557360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:34.819611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.851569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.893759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.920983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.960468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:34.999854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.051556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.092668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:35.183788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679586703270076:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.183865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.183976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679586703270081:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.184076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679586703270083:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.184096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:35.187380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:35.196973Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679586703270085:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:35.272293Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679586703270136:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:36.546164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679569523398362:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:36.546227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:36.730390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 1-07T17:54:54.569558Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.569575Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.571738Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.571794Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.571810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.577795Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.577859Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.577874Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.580090Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.580146Z 
node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.580161Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.586249Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.586307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.586322Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.588583Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.588644Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.588661Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.594772Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.594903Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.594926Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.597087Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.597141Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.597156Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 
2026-01-07T17:54:54.603949Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.604010Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.604039Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.611592Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.611647Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.611661Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.619122Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.619188Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.619203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.627778Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.627840Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.627856Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.636036Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.636098Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.636112Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.643772Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.643830Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.643847Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-07T17:54:54.681691Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679667798310368:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.681799Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.682074Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679667798310370:2791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.682129Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.698968Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679667798310379:3878] txid# 281474976710660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardMoveTest::MoveIndexSameDst >> TPartitionTests::UserActCount >> TSchemeShardMoveTest::MoveMigratedTable >> TPQTest::TestPQPartialRead >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInTargetTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::SplittedTargetTopic [GOOD] >> TPartitionTests::Batching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] Test command err: Trying to start YDB, gRPC: 2311, MsgBus: 12620 2026-01-07T17:54:10.110851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479339794975:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.111002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.151771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:10.561790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.561916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.587223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.657146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.724143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.835637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.941801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.941835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.941843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.941904Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.109952Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.757969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.807175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.017765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.225044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.313879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.692828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492224698513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.692966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.693246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492224698522:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.693306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.153049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.183277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.223523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.283047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.334853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.386661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.432698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.516113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496519666690:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.516209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.516282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496519666695:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.516622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496519666698:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.516686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.519551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.530856Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496519666697:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.609873Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496519666750:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.110179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479339794975:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.110245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63572, MsgBus: 26887 2026-01-07T17:54:17.564458Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679509587852919:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:17.564486Z node 2 :METADATA_PROVIDER ERROR: ... Result TxId: 281474976710669 2026-01-07T17:54:52.997057Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997071Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037957 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997099Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997112Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997132Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997156Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037953 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997163Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997201Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997226Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 
281474976710669 2026-01-07T17:54:52.997265Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997289Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997313Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997333Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038002 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997346Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997376Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997377Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997408Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997423Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997452Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997467Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997487Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997525Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997535Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997571Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997581Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997598Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997627Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997627Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997654Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997691Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997702Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997726Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997763Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997769Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997793Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997817Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997824Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:52.997861Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-07T17:54:53.002230Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:53.008874Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:53.101350Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679666000539347:6075] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:53.114151Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.202304Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:54.209954Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPQTest::TestCmdReadWithLastOffset >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval >> TPartitionTests::Batching [GOOD] >> TObjectStorageListingTest::MaxKeysAndSharding >> TSchemeShardMoveTest::TwoTables >> TPartitionTests::ChangeConfig >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TPartitionTests::ChangeConfig [GOOD] >> TSchemeShardMoveTest::ResetCachedPath >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6924, MsgBus: 5174 2026-01-07T17:54:10.093127Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679478646354522:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.093182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.431862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.466057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.466155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.575094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.578048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.593044Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679478646354482:2081] 1767808450087423 != 1767808450087426 2026-01-07T17:54:10.755696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.940423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.940517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.940525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.940621Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.109044Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.920982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.927041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.993192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.185557Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.437764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.530414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.774380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491531258240:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.774539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.774895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491531258250:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.774945Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.198790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.228744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.262295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.297651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.344808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.408882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.490852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.566963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495826226429:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.567055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.567129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495826226434:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.567291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679495826226436:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.567357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.570727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.580077Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679495826226437:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:14.646956Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679495826226489:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.093843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679478646354522:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.093902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{ ... or you don't have access permissions } 2026-01-07T17:54:52.977543Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:52.981674Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:52.996498Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679661910522015:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:53.090260Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679666205489365:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:53.986057Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679644730650099:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:53.986132Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"0","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[3,43]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":760,"Max":760,"Min":760,"History":[3,760]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[3,1048576]},"Introspections":["1 tasks for a single\/sequential source 
scan"],"WaitOutputTimeUs":{"Count":1,"Sum":30,"Max":30,"Min":30,"History":[3,30]},"Tasks":1,"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":48,"Max":48,"Min":48}}],"BaseTimeMs":1767808495252,"CpuTimeUs":{"Count":1,"Sum":392,"Max":392,"Min":392,"History":[3,392]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[3,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[3,96]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1076,"Max":1076,"Min":1076,"History":[3,1076]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"UpdateTimeMs":3}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"WaitOutputTimeUs":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[4,25]},"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":24,"Max":24,"Min":24}}],"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":43,"Max":43,"Min":43},"Introspections":["1 tasks same as previous stage"],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"BaseTimeMs":1767808495252,"Output":[{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[4,97]},"WaitTimeUs":{"Count":1,"Sum":3767,"Max":3767,"Min":3767,"History":[4,3767]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}},{"Pop":{},"Name":"6","Push":{}}],"CpuTimeUs":{"Count":1,"Sum":475,"Max":475,"Min":475,"History":[4,475]},"StageDurationUs":1000,"WaitInputTimeUs":{"Count":1,"Sum":1493,"Max":1493,"Min":1493,"History":[4,1493]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[4,43]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[4,43]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":760,"Max":760,"Min":760,"History":[4,760]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"UpdateTimeMs":4,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks"
:1}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":3714,"Max":3714,"Min":3714,"History":[5,3714]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":1}}},{"Pop":{},"Name":"8","Push":{}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[5,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Introspections":["1 minimum tasks for compute"],"InputBytes":{"Count":1,"Sum":97,"Max":97,"Min":97},"WaitOutputTimeUs":{"Count":1,"Sum":52,"Max":52,"Min":52,"History":[5,52]},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":1000,"BaseTimeMs":1767808495252,"CpuTimeUs":{"Count":1,"Sum":805,"Max":805,"Min":805,"History":[5,805]},"UpdateTimeMs":5,"Input":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[5,97]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[5,97]},"WaitTimeUs":{"Count":1,"Sum":3767,"Max":3767,"Min":3767,"History":[5,3767]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[5,1048576]},"Introspections":["1 minimum tasks for 
compute"],"InputBytes":{"Count":1,"Sum":25,"Max":25,"Min":25},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"StageDurationUs":0,"BaseTimeMs":1767808495252,"CpuTimeUs":{"Count":1,"Sum":437,"Max":437,"Min":437,"History":[5,437]},"UpdateTimeMs":5,"Input":[{"Pop":{},"Name":"6","Push":{}},{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":3714,"Max":3714,"Min":3714,"History":[5,3714]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":1}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":264554,"CpuTimeUs":259083},"ProcessCpuTimeUs":344,"TotalDurationUs":286898,"ResourcePoolId":"default","QueuedTimeUs":612},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-SelfCpu":0.805,"A-Cpu":0.805,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-SelfCpu":0.437,"A-Cpu":1.242,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] >> TPartitionTests::BlobKeyFilfer [GOOD] >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpQuery::CreateAsSelect_DisableDataShard [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> TSchemeShardMoveTest::Index [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> TSchemeShardMoveTest::MoveIndex >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::BlobKeyFilfer [GOOD] Test command err: 2026-01-07T17:54:58.651998Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.723420Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:58.723496Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:58.723573Z node 1 
:PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.723662Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:54:58.741377Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:183:2196] 2026-01-07T17:54:58.742333Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:183:2196] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.768596Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.811447Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.835524Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.836459Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-1 actual is session-id-2 2026-01-07T17:54:58.836509Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-3 actual is session-id-2 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.848239Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 
other 0 disallowed 0 2026-01-07T17:54:59.232182Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.275810Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:59.275864Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:59.275902Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:59.275944Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:54:59.291172Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:182:2194] 2026-01-07T17:54:59.293742Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:54:59.000000Z 2026-01-07T17:54:59.293996Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:182:2194] 2026-01-07T17:54:59.304849Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.335919Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.356711Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.387678Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.427035Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.458802Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.541026Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.572199Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send change config Wait cmd write (initial) Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\372\343\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" 
Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\372\343\314\27138\001" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-2" IncludeFrom: true To: "c0000000003Sclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003wclient-2|0000000000000000" IncludeFrom: true To: "c0000000003wclient-2|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-1@\000\252\002\014\n\010client-3@\007" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\372\343\314\27138\001" StorageChannel: INLINE } Wait config changed 2026-01-07T17:54:59.715912Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.727501Z node 2 :PERSQUEUE WARN: partition.cpp:2983: [72057594037927937][Partition][3][StateIdle] Partition 3 Unknown consumer 'client-2' 2026-01-07T17:55:00.039164Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.086253Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:00.086316Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:00.086360Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:00.086408Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:00.101489Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [3:181:2194] 2026-01-07T17:55:00.102513Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:181:2194] 
|96.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:54:59.271423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:54:59.271496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:59.271533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:54:59.271564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:54:59.271623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:54:59.271651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:54:59.271698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:59.271758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:54:59.272495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:54:59.272762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:54:59.358251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:54:59.358315Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:59.379280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:54:59.379657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:54:59.379857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:54:59.392373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:54:59.392752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:54:59.393474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-07T17:54:59.393724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:59.396390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:59.396559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:54:59.397772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:59.397830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:59.397929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:59.397973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:54:59.398013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:54:59.398209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.409043Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:54:59.545905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:54:59.546162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.546378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:54:59.546424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:54:59.546644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:54:59.546711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:59.548983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:59.549210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:54:59.549427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.549497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:54:59.549550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:54:59.549585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:54:59.551544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.551600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:54:59.551636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:54:59.553241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.553286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.553338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:59.553384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:54:59.556812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:54:59.558684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:54:59.558873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:54:59.559809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:59.559929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:54:59.559968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:59.560220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:54:59.560265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:59.560418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:54:59.560501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:59.562388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:59.562434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 16850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.017116Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 239us result status StatusPathDoesNotExist 2026-01-07T17:55:00.017351Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:55:00.017840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.018041Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 197us result status StatusSuccess 2026-01-07T17:55:00.018445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:00.019206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.019378Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 170us result status StatusPathDoesNotExist 2026-01-07T17:55:00.019499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 
1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:55:00.019893Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.020084Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 150us result status StatusSuccess 2026-01-07T17:55:00.020443Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2026-01-07T17:55:00.021232Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.021393Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 165us result status StatusSuccess 2026-01-07T17:55:00.021755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is 
[1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:54:58.054211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:54:58.054313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.054360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:54:58.054397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:54:58.054432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:54:58.054459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:54:58.054513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.054576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:54:58.055425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:54:58.055719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:54:58.144014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:54:58.144080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:58.159954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:54:58.160365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:54:58.160568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:54:58.167504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:54:58.167899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:54:58.168558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.168860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:58.171837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.172064Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:54:58.173293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:58.173353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.173482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:58.173533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:54:58.173597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:54:58.173814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.181135Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:54:58.305731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:54:58.306006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.306223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:54:58.306300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:54:58.306544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:54:58.306620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:58.309344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.309571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:54:58.309813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2026-01-07T17:54:58.309913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:54:58.309958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:54:58.309990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:54:58.312371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.312430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:54:58.312475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:54:58.314484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.314538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.314593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.314651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:54:58.318292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:54:58.320488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:54:58.320724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:54:58.321829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.321967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:54:58.322015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.322306Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:54:58.322365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.322550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:54:58.322646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:58.325108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:58.325159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 57594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:00.281595Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.281834Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 259us result status StatusSuccess 2026-01-07T17:55:00.282604Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:00.283317Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.283544Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 254us result status StatusSuccess 2026-01-07T17:55:00.284196Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:54:58.537378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:54:58.537464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.537510Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:54:58.537549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:54:58.537585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:54:58.537615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:54:58.537665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.537712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:54:58.538392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:54:58.538687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:54:58.614790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:54:58.614850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:58.630314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:54:58.630626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:54:58.630813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:54:58.637165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:54:58.637541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:54:58.638223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.638470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:58.641151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.641351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:54:58.642541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:58.642595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.642710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:58.642782Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:54:58.642831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:54:58.643005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.649862Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:54:58.780076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:54:58.780304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.780456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:54:58.780487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:54:58.780639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:54:58.780682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:58.782735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.782945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:54:58.783094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.783159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:54:58.783200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:54:58.783222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:54:58.785217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.785273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:54:58.785311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:54:58.788390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.788448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:54:58.788505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.788565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:54:58.795874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:54:58.797637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:54:58.797855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:54:58.798672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.798821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:54:58.798869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.799113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:54:58.799160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:54:58.799319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:54:58.799405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:58.801235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:58.801273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... ep: 5000042 2026-01-07T17:55:00.900634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:00.900730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:00.900803Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-07T17:55:00.900854Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-07T17:55:00.902652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-07T17:55:00.902707Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-07T17:55:00.902807Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:55:00.902839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:55:00.902875Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-07T17:55:00.902905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:55:00.902938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:55:00.902989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:130:2154] message: TxId: 281474976710760 2026-01-07T17:55:00.903030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:55:00.903064Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:55:00.903094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:55:00.903155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 
2026-01-07T17:55:00.904693Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:55:00.904737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:55:00.904783Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-07T17:55:00.904905Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:867:2825], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:55:00.906444Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-07T17:55:00.906552Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:867:2825], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:55:00.906608Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:55:00.907872Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-07T17:55:00.907997Z node 2 
:BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:867:2825], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:55:00.908040Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-07T17:55:00.908173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:55:00.908217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1048:2994] TestWaitNotification: OK eventTxId 102 2026-01-07T17:55:00.908718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:00.908957Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 239us result status StatusSuccess 2026-01-07T17:55:00.909464Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 41 
Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7020, MsgBus: 1075 2026-01-07T17:54:10.097156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679482104159388:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.097230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.445208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.540990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.541167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.582578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.615333Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.657022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.944543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.944574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.944579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.944663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.111755Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.812986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.818796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.865454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.019947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.204819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.280666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.576711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494989063113:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.576800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.577068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494989063123:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.577103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.161466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.191016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.220407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.253879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.291151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.349287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.388731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.498329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679499284031292:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.498430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.498805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679499284031297:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.498851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679499284031298:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.499150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.503495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.519288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679499284031301:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.618611Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679499284031354:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.093785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679482104159388:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.093869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.245551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689 ... cpp:689) 2026-01-07T17:54:50.261440Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.400554Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:50.453076Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.539399Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.420720Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679666160420977:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.420831Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.421250Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679666160420986:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.421317Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.517335Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.554609Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.588919Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.629072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.668317Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.710590Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.751913Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.808881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.940101Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679666160421856:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.940238Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.940570Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679666160421861:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.940625Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679666160421862:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.940697Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.945332Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:53.964864Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679666160421865:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:54.055537Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679670455389212:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:54.380079Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679648980549947:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:54.380167Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:56.297127Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:56.348038Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:56.397502Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent 
Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPQTest::TestCmdReadWithLastOffset [GOOD] >> TPQTest::TestAccountReadQuota >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval [GOOD] >> DqUnboxedValueDoNotFitToArrow::OptionalOfOptional [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:00.064317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:00.064386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:00.064413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:00.064443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:00.064480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:00.064502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:00.064574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:00.064621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:00.065395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:00.065612Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:00.136531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:00.136581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:00.151843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:00.152259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:00.152470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:00.159232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:00.159656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:00.160463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:00.160748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:00.164068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:00.164266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:00.165422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:00.165470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:00.165542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:00.165581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:00.165625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:00.165761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:00.299221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.300957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.301024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.301112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.301221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:00.301310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
44 2026-01-07T17:55:01.775673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-07T17:55:01.775833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:55:01.779864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-07T17:55:01.780054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000040 2026-01-07T17:55:01.780526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:01.780664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:01.780731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000040, at schemeshard: 72057594046678944 2026-01-07T17:55:01.781195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 129 2026-01-07T17:55:01.781365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-01-07T17:55:01.788091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:01.788164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-07T17:55:01.788424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:01.788469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-07T17:55:01.789061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.789149Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2026-01-07T17:55:01.790314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:55:01.790431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-07T17:55:01.790486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-07T17:55:01.790536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 8 2026-01-07T17:55:01.790581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:55:01.790683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-07T17:55:01.791609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1367 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-07T17:55:01.791656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-07T17:55:01.791804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1367 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-07T17:55:01.791981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1367 } } CommitVersion { Step: 5000040 TxId: 105 } debug: NTableState::TProposedWaitParts operationId# 105:0 2026-01-07T17:55:01.792541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1079 RawX2: 4294970313 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-07T17:55:01.792601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-07T17:55:01.792743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1079 RawX2: 4294970313 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-07T17:55:01.792809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:55:01.792918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1079 RawX2: 4294970313 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-07T17:55:01.794288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:01.794333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.794399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-07T17:55:01.794460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 129 -> 240 2026-01-07T17:55:01.798363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-07T17:55:01.799769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.799907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.800228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.800286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-07T17:55:01.800391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:55:01.800429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:55:01.800487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-07T17:55:01.800529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:55:01.800568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 
2026-01-07T17:55:01.800646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:738:2715] message: TxId: 105 2026-01-07T17:55:01.800699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-07T17:55:01.800752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-07T17:55:01.800798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-07T17:55:01.800983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:55:01.802936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-07T17:55:01.802997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1246:3165] TestWaitNotification: OK eventTxId 105 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::Replace >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::MultiTxStatsFullYql >> TPQTest::TestPartitionTotalQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:54:58.685970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:54:58.686050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.686100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:54:58.686141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:54:58.686180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:54:58.686203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:54:58.686255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:54:58.686329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:54:58.687196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:54:58.687484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:54:58.775361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:54:58.775425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:58.797978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:54:58.798319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:54:58.798495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:54:58.806491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:54:58.806870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:54:58.807565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:54:58.807810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:54:58.812977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.813209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:54:58.814347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:54:58.814412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:54:58.814535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:58.814583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:54:58.814625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:54:58.814819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:54:59.006824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.012896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:54:59.013409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:55:02.135085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 737 RawX2: 8589937311 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:55:02.135160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:02.135201Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-07T17:55:02.135244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:55:02.135288Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:2 129 -> 240 2026-01-07T17:55:02.137386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 8589937314 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:55:02.137432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-07T17:55:02.137534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 8589937314 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:55:02.137580Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:55:02.137652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 741 RawX2: 8589937314 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-07T17:55:02.137708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:02.137741Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:55:02.137774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:55:02.137814Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-07T17:55:02.139915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-07T17:55:02.142032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:55:02.142228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-07T17:55:02.142597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-07T17:55:02.142644Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:02.142701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:55:02.142840Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-01-07T17:55:02.142880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-07T17:55:02.142917Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-01-07T17:55:02.142949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-07T17:55:02.142988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2026-01-07T17:55:02.143610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:55:02.143862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:55:02.143902Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:02.143949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:55:02.144019Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-07T17:55:02.144045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-07T17:55:02.144075Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-07T17:55:02.144098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 
2026-01-07T17:55:02.144127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2026-01-07T17:55:02.144204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:792:2758] message: TxId: 102 2026-01-07T17:55:02.144253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-07T17:55:02.144296Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:55:02.144333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:55:02.144461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-07T17:55:02.144501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:55:02.144536Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-07T17:55:02.144559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-07T17:55:02.144589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-07T17:55:02.144611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:55:02.144631Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-07T17:55:02.144652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-07T17:55:02.144691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:55:02.144713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:55:02.145087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:55:02.145141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:55:02.145210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:55:02.145256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:55:02.145290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:55:02.145322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:55:02.145353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:55:02.153540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:55:02.153619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:895:2853] 2026-01-07T17:55:02.154381Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:00.983449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:00.983539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:00.983590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:00.983637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:00.983676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:00.983704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:00.983765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:00.983832Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:00.984508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:00.984745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:01.064350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:01.064427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:01.081160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:01.081542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:01.081734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:01.089069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:01.089463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:01.090165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:01.090456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:01.094579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:01.094823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:01.096024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:01.096091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:01.096201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:01.096263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:01.096318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:01.096533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.104413Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:55:01.218405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:55:01.218669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.218900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:55:01.218944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:55:01.219159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:55:01.219228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:01.221438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:01.221668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:55:01.221906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.221992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:55:01.222054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:55:01.222093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:55:01.223993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.224051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:01.224089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:55:01.225819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.225871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:01.225925Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:01.225986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:55:01.229468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:55:01.231494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:55:01.231704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:55:01.232791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:01.232944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:01.232990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:01.233258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:55:01.233311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:01.233485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:55:01.233565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:01.235674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:01.235716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:02.908685Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:55:02.908872Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 193us result status StatusSuccess 2026-01-07T17:55:02.909516Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:02.910175Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:55:02.910386Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 234us result status StatusSuccess 2026-01-07T17:55:02.911101Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardMoveTest::MoveTableForBackup >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWrite >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> TSchemeShardMoveTest::Boot >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TPQTest::TestReadRuleVersions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13015, MsgBus: 5578 2026-01-07T17:54:10.097735Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679481669855852:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.097786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.438856Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.438968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.513306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.570779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.576832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.777949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.943713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.943766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.943774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.943867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.102894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.866655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.877526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.957341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.197222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.421854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.516990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.767926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494554759459:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.768049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.768462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494554759469:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.768540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.166103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.199470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.231514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.292725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.324094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.358187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.416879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.486720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498849727636:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.486823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.486915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498849727641:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.486950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679498849727642:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.486979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.490354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.502478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679498849727645:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.605301Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679498849727696:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.101011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679481669855852:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.101072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64238, MsgBus: 2365 2026-01-07T17:54:17.375299Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:17.375439Z node 2 :METADATA_PROVIDER ERROR: log.cpp:84 ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:43.903046Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:43.903120Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:43.909272Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-07T17:54:43.909406Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:43.925473Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:43.940034Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:43.999528Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:44.019834Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:44.019857Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:44.019865Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:44.019944Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:44.590332Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:44.627583Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.720045Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:44.755496Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:44.782959Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.954823Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:45.059251Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.929526Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679640863326751:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.929642Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.930003Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679640863326760:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.930079Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.019830Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.088282Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.151460Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.212684Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.276973Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.338692Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.403808Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.482555Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.597695Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679645158295223:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.597782Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.598042Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679645158295228:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.598092Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679645158295229:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.598133Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:48.602515Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:48.622571Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679645158295232:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:48.681917Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679645158295306:4898] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:48.695042Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679623683455136:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:48.695119Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:48.731783Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7592679622669615446:2100];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:48.731858Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:58.847413Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:54:58.847444Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:49.456724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:49.456835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:49.456890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:49.456951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:49.456991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:49.457026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2026-01-07T17:53:49.457086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:49.457158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:49.458059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:49.458390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:49.543350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:49.543425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:49.565502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:49.565948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:49.566203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:49.575511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:49.576029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:49.577031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:49.577390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:49.581447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:49.581782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:49.583502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:49.583597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:49.583801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:49.583875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:49.584016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:49.584336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:49.724477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.725609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.725766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.725861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.725938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:49.726781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:35.987283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:54:40.099632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:40.099771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2026-01-07T17:54:40.099860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:54:40.211952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-07T17:54:40.212078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:54:40.212150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:54:40.322128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-07T17:54:40.322257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-07T17:54:40.322335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-07T17:54:40.377986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-07T17:54:40.378098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-07T17:54:40.378178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-07T17:54:40.420594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2026-01-07T17:54:40.462585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:54:40.462811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:54:40.462884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, 
RowCount 1, DataSize 41 2026-01-07T17:54:40.463004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:40.473496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:54:44.576162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2026-01-07T17:54:44.619510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:54:44.619721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:54:44.619790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-07T17:54:44.619903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:44.631261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:54:48.856545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-01-07T17:54:48.897971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:54:48.898163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:54:48.898233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-07T17:54:48.898332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:48.908733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:54:53.209587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-07T17:54:53.251315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:54:53.251476Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:54:53.251524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-07T17:54:53.251600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:53.262692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:54:57.433583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-07T17:54:57.493157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:54:57.493347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:54:57.493410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-07T17:54:57.493530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:54:57.505180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-07T17:55:01.689424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0018 2026-01-07T17:55:01.731013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-07T17:55:01.731171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-07T17:55:01.731229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-07T17:55:01.731321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-07T17:55:01.741718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue 
size# 0 2026-01-07T17:55:05.605506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:05.605909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-39-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2026-01-07T17:55:05.610500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... waiting for metering (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:04.483023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:04.483097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:04.483137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:04.483167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:04.483196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:04.483219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2026-01-07T17:55:04.483259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:04.483312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:04.483968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:04.484205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:04.563336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:04.563424Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:04.580548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:04.581006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:04.581210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:04.599929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:04.600340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:04.601119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:04.601393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:04.604703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:04.604977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:04.606261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:04.606328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:04.606462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:04.606523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:04.606565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:04.606880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:04.767514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.768592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.768721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.768833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.768908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:04.769734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EvNotifyTxCompletionResult to actorId: [2:787:2754] message: TxId: 102 2026-01-07T17:55:07.706587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-07T17:55:07.706633Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:55:07.706669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:55:07.706832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-07T17:55:07.706873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:55:07.706910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-07T17:55:07.706933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-07T17:55:07.706977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:55:07.707004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:55:07.707317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:55:07.707370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:55:07.707457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:55:07.707505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:55:07.707540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-07T17:55:07.709691Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:55:07.709765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:55:07.709811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:884:2837] TestWaitNotification: OK eventTxId 102 2026-01-07T17:55:07.720828Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-07T17:55:07.721126Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 334us result status StatusPathDoesNotExist 2026-01-07T17:55:07.721291Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:55:07.721712Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:07.721880Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 182us result status StatusPathDoesNotExist 2026-01-07T17:55:07.722013Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:55:07.722390Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:55:07.722616Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 244us result status StatusSuccess 2026-01-07T17:55:07.723100Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:07.723693Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:55:07.723870Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 204us result status StatusSuccess 2026-01-07T17:55:07.724161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TPQTabletTests::DropTablet_And_Tx >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c >> TPQTabletTests::DropTablet_Before_Write >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> KqpQuery::DictJoin [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:06.492448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:06.492533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:06.492583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:06.492617Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:06.492646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:06.492673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:06.492723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:06.492775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:06.493566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:06.493850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:06.573856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:06.573931Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:06.587733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:06.588036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:06.588229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:06.593346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:06.593688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:06.594425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:06.594679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:06.597205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:06.597402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:06.598543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:06.598609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:06.598714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:06.598788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:06.598893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:06.599077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:06.732745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.733998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:06.734375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 5186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:55:08.993964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:08.994005Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:55:08.994044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-07T17:55:08.994090Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-07T17:55:08.995153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 8589937311 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:55:08.995194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2026-01-07T17:55:08.995300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 8589937311 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:55:08.995350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-07T17:55:08.995414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 737 RawX2: 8589937311 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-07T17:55:08.995458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:08.995483Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-07T17:55:08.995509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-07T17:55:08.995538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:2 129 -> 240 
2026-01-07T17:55:08.997849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:55:09.001806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-07T17:55:09.002874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:55:09.003421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-07T17:55:09.003489Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:09.003537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:55:09.003732Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2026-01-07T17:55:09.003772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-07T17:55:09.003811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2026-01-07T17:55:09.003844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-07T17:55:09.003881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2026-01-07T17:55:09.005025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-07T17:55:09.005664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-07T17:55:09.005716Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:09.005763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-07T17:55:09.005846Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2026-01-07T17:55:09.005870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-07T17:55:09.005902Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2026-01-07T17:55:09.005923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-07T17:55:09.005948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2026-01-07T17:55:09.005985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-07T17:55:09.006028Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-07T17:55:09.006065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-07T17:55:09.006207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-07T17:55:09.006251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:55:09.006292Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-07T17:55:09.006314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-07T17:55:09.006342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-07T17:55:09.006387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:55:09.006418Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-07T17:55:09.006438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-07T17:55:09.006487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-07T17:55:09.006513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:55:09.007054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:55:09.007116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:55:09.007183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:55:09.007232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:55:09.007271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-07T17:55:09.007295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-07T17:55:09.007327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-07T17:55:09.016505Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [2:682:2671] Handle TEvGetProxyServicesRequest 2026-01-07T17:55:09.017064Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 103 2026-01-07T17:55:09.067049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-07T17:55:09.067109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-07T17:55:09.067533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-07T17:55:09.067631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:55:09.067681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:1094:2976] TestWaitNotification: OK eventTxId 103 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> BSCMovePDisk::PDiskMove_ErasureNone >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TPQTabletTests::Huge_ProposeTransacton |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 10857, MsgBus: 5649 2026-01-07T17:54:42.296279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679617071364469:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:42.298647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:42.581243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:42.653200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:42.653350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:42.663955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:42.774355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:42.805266Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679617071364345:2081] 1767808482278904 != 1767808482278907 2026-01-07T17:54:42.853239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:42.901484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:42.901505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:42.901512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:42.901585Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:43.305597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:43.404525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:43.413310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:45.570453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679629956267114:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.570565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679629956267126:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.570602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.570934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679629956267131:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.571020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.574536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:45.583327Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679629956267130:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:54:45.707097Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679629956267183:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:45.920707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 19785, MsgBus: 22452 2026-01-07T17:54:46.916143Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679634232699769:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:46.916545Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:46.941622Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:47.043877Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:47.043957Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:47.057077Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679634232699555:2081] 1767808486883402 != 1767808486883405 2026-01-07T17:54:47.069682Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:47.080086Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:47.142902Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:47.195967Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:47.195987Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:47.195993Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:47.196076Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:47.657231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:47.666371Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:47.906477Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:49.894425Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679647117602323:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.894538Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.894846Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679647117602336:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.894886Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679647117602335:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.894930Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.898863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:49.910392Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679647117602339:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:54:50.008165Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679651412569688:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:50.035978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: ( ... accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:00.347431Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679672633783536:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:00.347504Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23135, MsgBus: 15496 2026-01-07T17:55:02.127388Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679704221801495:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:02.127478Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:02.171942Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:02.245627Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:02.270053Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:02.270152Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:02.272806Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:02.325497Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:02.325520Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:02.325525Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:02.325589Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:02.399500Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:02.835477Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:02.853982Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:02.912551Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:03.059942Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:03.132336Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:03.135005Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:05.798026Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679717106705210:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.798117Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.798399Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679717106705220:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.798455Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.855423Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.897993Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.934727Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.973413Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.011195Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.065426Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.109335Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.182226Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.275780Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679721401673389:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.275889Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.275942Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679721401673394:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.276568Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679721401673397:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.276612Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.281053Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:06.295238Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679721401673396:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:06.353501Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679721401673449:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:07.129013Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679704221801495:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:07.129084Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate >> KqpStats::JoinStatsBasicScan [GOOD] >> Channels20::LocalChannelBackPressure >> KqpQuery::QueryFromSqs [GOOD] >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] Test command err: 2026-01-07T17:55:06.568596Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:06.656226Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:55:06.659386Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:55:06.659609Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:06.659647Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:06.659674Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:55:06.659727Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:55:06.659788Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:06.659838Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:06.681555Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2026-01-07T17:55:06.681713Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:55:06.699389Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:06.702083Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:06.702197Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:06.702982Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:06.703116Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:55:06.703432Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:55:06.703762Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2026-01-07T17:55:06.704587Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:55:06.704650Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:55:06.704688Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2026-01-07T17:55:06.704743Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:55:06.704794Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:55:06.705275Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:55:06.705330Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:06.705360Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:06.705397Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:06.705422Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:06.705456Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:06.705516Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:55:06.705557Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:55:06.705605Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:06.705632Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:06.705659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:06.705806Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:06.705879Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:55:06.706019Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:55:06.706179Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:06.708115Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:06.708193Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:06.708225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:06.708252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:06.708276Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:06.708301Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:06.708340Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:06.708380Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:06.708668Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2026-01-07T17:55:06.709273Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2026-01-07T17:55:06.710004Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-07T17:55:06.710068Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-07T17:55:06.710141Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-07T17:55:06.710190Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-07T17:55:06.710253Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-07T17:55:06.710294Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-07T17:55:06.710332Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-07T17:55:06.710378Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-07T17:55:06.710530Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 181 RawX2: 4294969490 } Partitions { } 2026-01-07T17:55:06.710609Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 
72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:55:06.712475Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:55:06.712528Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-07T17:55:06.712575Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-07T17:55:06.712611Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-07T17:55:06.716308Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 181 RawX2: 4294969490 } } St ... partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.448233Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.448278Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.448313Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.458717Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.479721Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.490365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:10.490436Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.490462Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.490497Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.490542Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.511194Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:10.511270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.511306Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.511351Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.511396Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.532122Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.542804Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:10.542870Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.542897Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.542931Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.542956Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.543421Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:380:2325], now have 1 active actors on pipe 2026-01-07T17:55:10.543692Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletProducer: 22226 ReadSet: "\010\002" Seqno: 0 2026-01-07T17:55:10.543746Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22226 2026-01-07T17:55:10.543789Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-07T17:55:10.543836Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 5/5 2026-01-07T17:55:10.543880Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-07T17:55:10.543923Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-07T17:55:10.543978Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-07T17:55:10.544026Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-07T17:55:10.544103Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-07T17:55:10.544144Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-07T17:55:10.544220Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-07T17:55:10.544254Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:10.544287Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:10.544317Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.544365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:55:10.544431Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-07T17:55:10.544465Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:10.544498Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:10.544534Z node 6 
:PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.544708Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:10.547373Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:10.547511Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:10.547571Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:10.547621Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:10.547663Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:10.547705Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:10.547744Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:10.547811Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:10.547895Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-07T17:55:10.547946Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-07T17:55:10.547992Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-07T17:55:10.548040Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-07T17:55:10.548085Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-07T17:55:10.548142Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-07T17:55:10.548187Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-07T17:55:10.548251Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-07T17:55:10.548335Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-07T17:55:10.548400Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:55:10.548453Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:55:10.548486Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:55:10.548522Z 
node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:55:10.548556Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-07T17:55:10.548591Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-07T17:55:10.548634Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-07T17:55:10.548679Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-07T17:55:10.548720Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:55:10.548756Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-07T17:55:10.548942Z node 6 :PQ_TX INFO: pq_impl.cpp:3459: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletConsumer: 72057594037927937 Flags: 0 Seqno: 0 2026-01-07T17:55:10.548991Z node 6 :PQ_TX DEBUG: transaction.cpp:344: [TxId: 67890] Handle TEvReadSetAck txId 67890 2026-01-07T17:55:10.549039Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-07T17:55:10.549078Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-07T17:55:10.549126Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-07T17:55:10.549158Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-07T17:55:10.549195Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 14840760397270361825 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestSeveralOwners >> ConvertUnboxedValueToArrowAndBack::Struct [GOOD] >> ConvertUnboxedValueToArrowAndBack::ListOfJsons [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalListOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::LargeVariant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 3631, MsgBus: 24783 2026-01-07T17:54:39.862693Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679604289977313:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:39.862765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:40.149947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:40.156610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:40.156737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:40.215782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:40.227921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:40.348264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:40.348296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:40.348304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:40.348392Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:40.399676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:40.801102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:40.813855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:40.862886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.882989Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:40.999153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:41.145428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:41.207321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.002093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679617174881050:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.002255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.013100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679621469848356:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.013185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.340160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.379190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.417424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.452574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.483118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.524460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.573916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.622061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:43.701737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679621469849229:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.701831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.701880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679621469849234:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.702177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679621469849236:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.702271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:43.705278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:43.717020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679621469849237:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:43.786975Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679621469849289:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:44.865069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679604289977313:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:44.865134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10952, MsgBus: 17945 2026-01-07T17:54:46.948473Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679636001188286:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:46.948525Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cp ... 01-07T17:55:01.008447Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:01.033968Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:55:01.140696Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592679698057511471:2081] 1767808501007531 != 1767808501007534 2026-01-07T17:55:01.150405Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:01.153147Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:01.153221Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:01.153840Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:01.172752Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:01.242918Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:01.242940Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:01.242947Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:01.243028Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:01.332691Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:01.788246Z node 4 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:01.795453Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:01.805014Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:01.883024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:02.025298Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:02.033923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:02.137164Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.780338Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679710942415225:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:04.780426Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:04.780869Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679710942415235:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:04.780915Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:04.862427Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.901532Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.939849Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.976625Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.010629Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.052085Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.090562Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.152954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:05.250806Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679715237383412:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.250905Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.251416Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679715237383416:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.251462Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679715237383418:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.251497Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:05.256721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:05.270114Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679715237383421:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:05.343602Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679715237383472:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:06.008640Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679698057511510:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:06.008716Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:10.207670Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808507899, txId: 281474976710673] shutting down |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> ConvertUnboxedValueToArrowAndBack::Tuple >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> ConvertUnboxedValueToArrowAndBack::Tuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverTupleWithOptionals [GOOD] >> DqOutputChannelTests::PartialRead [GOOD] >> DqOutputChannelTests::Overflow [GOOD] >> DqOutputChannelTests::BigRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 7645, MsgBus: 14093 2026-01-07T17:54:43.233706Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679621102418069:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:43.234260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:43.481832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:43.484001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:43.484078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:43.582905Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679621102417963:2081] 1767808483227215 != 1767808483227218 2026-01-07T17:54:43.590366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:43.600196Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:43.723524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:43.723546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:43.723556Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:43.723659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:43.737024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:44.188695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:44.194530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:44.237806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:44.252180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.366386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.521633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.596643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.415854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679633987321723:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.415975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.416393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679633987321733:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.416462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.810404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.851463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.892012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.922592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.952891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.987538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.024841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.096809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.179732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679638282289897:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.179836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.180098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679638282289902:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.180130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679638282289903:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.180227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.183967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:47.197091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679638282289906:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:47.283241Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679638282289959:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:48.231382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679621102418069:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:48.231439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:48.738494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 1 } cpu_time_us: 1062 affected_shards: 2 } compilation { duration_us: 204578 cpu_time_us: 198978 } process_cpu_time_us: 665 total_duration_us: 220926 total_cpu_time_us: 201911 Trying to start YDB, gRPC: 20586, MsgBus: 8676 2026-01-07T17:55:03.654206Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592679709583819080:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:03.655041Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:03.667852Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:03.731636Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:03.780397Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:03.780482Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:03.782216Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:03.825507Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:03.825529Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:03.825536Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:03.825624Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:03.836160Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:04.306322Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:04.323731Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.377408Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.574573Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.657410Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:04.665321Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:07.294067Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679726763690082:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.294174Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.294450Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679726763690092:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.294493Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.364454Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.396599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.428574Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.466938Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.504436Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.541509Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.584283Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.645882Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.760437Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679726763690964:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.760531Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.760873Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679726763690969:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.760916Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679726763690970:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.761042Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:07.765308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:07.774355Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679726763690973:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:07.851820Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679726763691024:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:08.654058Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679709583819080:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:08.654897Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:09.490600Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:03.472465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:03.472553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:03.472601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:03.472640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:03.472678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:03.472712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:03.472764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:03.472832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:03.473706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:03.474063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:03.566797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:03.566861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:03.582277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:03.582695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:03.582904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:03.591015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:03.591472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:03.592140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:03.592423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:03.595914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:03.596142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:03.597401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:03.597469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:03.597581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:03.597642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:03.597696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:03.597896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.605950Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-07T17:55:03.763353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-07T17:55:03.763629Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.763827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-07T17:55:03.763872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-07T17:55:03.764104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-07T17:55:03.764170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:03.766748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:03.767003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-07T17:55:03.767234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.767342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-07T17:55:03.767388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-07T17:55:03.767422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-07T17:55:03.769724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.769784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:03.769824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-07T17:55:03.771798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.771849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-07T17:55:03.771909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:03.771970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-07T17:55:03.781040Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T17:55:03.784754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-07T17:55:03.785015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-07T17:55:03.786124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:03.786256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:55:03.786304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:03.786589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-07T17:55:03.786660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-07T17:55:03.786864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-07T17:55:03.786960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:03.789346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:03.789404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 7205759 ... 
26-01-07T17:55:11.639419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 65], version: 18446744073709551615 2026-01-07T17:55:11.639487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 4 2026-01-07T17:55:11.639921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:55:11.639992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:55:11.640015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:55:11.640050Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 66], version: 18446744073709551615 2026-01-07T17:55:11.640095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 3 2026-01-07T17:55:11.640483Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:55:11.640558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-07T17:55:11.640586Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-07T17:55:11.640615Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 67], version: 18446744073709551615 2026-01-07T17:55:11.640652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 5 2026-01-07T17:55:11.640736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 4/6, is published: true 2026-01-07T17:55:11.641970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:4, at schemeshard: 72057594046678944 2026-01-07T17:55:11.642026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:4 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:11.642327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 4 2026-01-07T17:55:11.642443Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-07T17:55:11.642472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-07T17:55:11.642516Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-07T17:55:11.642545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-07T17:55:11.642580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 5/6, is published: true 2026-01-07T17:55:11.643179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-07T17:55:11.643226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:2 ProgressState, at schemeshard: 72057594046678944 2026-01-07T17:55:11.643427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 3 2026-01-07T17:55:11.643524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-07T17:55:11.643551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-07T17:55:11.643600Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-07T17:55:11.643638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-07T17:55:11.643674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 6/6, is published: true 2026-01-07T17:55:11.643745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:742:2719] message: TxId: 107 2026-01-07T17:55:11.643810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-07T17:55:11.643872Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:55:11.643912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-07T17:55:11.644039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-07T17:55:11.644134Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-07T17:55:11.644164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-07T17:55:11.644200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 64] was 4 
2026-01-07T17:55:11.644230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-07T17:55:11.644261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-07T17:55:11.644319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 2 2026-01-07T17:55:11.644346Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:3 2026-01-07T17:55:11.644364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:3 2026-01-07T17:55:11.644405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 2 2026-01-07T17:55:11.644429Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:4 2026-01-07T17:55:11.644448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:4 2026-01-07T17:55:11.644504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 3 2026-01-07T17:55:11.644531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:5 2026-01-07T17:55:11.644560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:5 2026-01-07T17:55:11.644634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 68] was 1 2026-01-07T17:55:11.645149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.645216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.645259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.645383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:55:11.645432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 68], at schemeshard: 72057594046678944 2026-01-07T17:55:11.645513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 2 2026-01-07T17:55:11.648269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.648412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 
2026-01-07T17:55:11.648524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.648628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.651385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.651502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-07T17:55:11.651730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:55:11.651780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:4149:5899] 2026-01-07T17:55:11.651886Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TPartitionTests::ShadowPartitionCounters >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> DqOutputChannelTests::BigRow [GOOD] >> DqOutputChannelTests::ChunkSizeLimit [GOOD] >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectView >> Channels20::IcChannelLateBinding |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::ChunkSizeLimit [GOOD] |96.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] >> TPartitionTests::CorrectRange_Commit >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService >> BSCMovePDisk::PDiskMove_Mirror3dc |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] Test command err: 2026-01-07T17:55:09.159433Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:09.218251Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:55:09.221868Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:55:09.222182Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:09.222257Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:09.222297Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:55:09.222357Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:55:09.222407Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:09.222469Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:09.238881Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-07T17:55:09.239035Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:55:09.256737Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:09.259723Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:09.259831Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:09.261463Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-07T17:55:09.261623Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:55:09.261724Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:55:09.262327Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:55:09.262715Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:55:09.263631Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:55:09.263680Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:55:09.263740Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:55:09.263797Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:55:09.263880Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:55:09.264464Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:55:09.264536Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:09.264570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:09.264626Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:09.264663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:09.264700Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:09.264778Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:55:09.264818Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:55:09.264852Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:09.264899Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:09.264962Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:09.265169Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:09.265254Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:55:09.265409Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:55:09.265619Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2026-01-07T17:55:09.266273Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:55:09.266328Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-07T17:55:09.266357Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2026-01-07T17:55:09.266389Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:55:09.266427Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:55:09.266998Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-07T17:55:09.267032Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:55:09.267057Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:09.267089Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:09.267111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:09.267139Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:09.267176Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-07T17:55:09.267209Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-07T17:55:09.267252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:09.267282Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:55:09.267308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:55:09.267439Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:09.267495Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:55:09.267653Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:09.267813Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:09.268008Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:09.268156Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:55:09.271800Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... QUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Add new write blob: topic 'topic' partition {0, KafkaTransactionWriteId{2, 0}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000? size 64 WTime 1800333 2026-01-07T17:55:13.803692Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:13.803818Z node 6 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100001 offset 0 partNo 0 count 1 size 64 2026-01-07T17:55:13.810122Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 100001 offset 0 count 1 size 64 actorID [6:139:2142] 2026-01-07T17:55:13.810322Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 100001 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-07T17:55:13.810404Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:13.810490Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 11 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:13.810578Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {0, KafkaTransactionWriteId{2, 0}, 100001} 2026-01-07T17:55:13.810655Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Answering for message sourceid: '2', Topic: 'topic', Partition: {0, KafkaTransactionWriteId{2, 0}, 100001}, SeqNo: 0, partNo: 0, Offset: 0 is stored on disk 2026-01-07T17:55:13.810909Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:13.810968Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.811020Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:13.811069Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.811111Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:13.811174Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:55:13.811277Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. CacheSize 128 CachedBlobs 2 2026-01-07T17:55:13.811347Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2026-01-07T17:55:13.812202Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-07T17:55:13.812268Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-07T17:55:13.812766Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-07T17:55:13.813459Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:13.813527Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. 
Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-07T17:55:13.816549Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 0 count 1 actorID [6:139:2142] 2026-01-07T17:55:13.816718Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 100000 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-07T17:55:13.816855Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:13.817055Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:55:13.817133Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:55:13.817189Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.817233Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:13.817282Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.817346Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-07T17:55:13.817433Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:55:13.817871Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 64 CachedBlobs 1 2026-01-07T17:55:13.818013Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-07T17:55:13.818081Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-07T17:55:13.818140Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-07T17:55:13.818241Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:55:13.821021Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:55:13.831580Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.862857Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.873500Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:13.873584Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.873625Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:13.873664Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:13.873701Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:13.989574Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:14.287258Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:14.287333Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.287370Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:14.287409Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.287456Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:14.309351Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:55:14.340895Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.363966Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] 
Process user action and tx events 2026-01-07T17:55:14.364029Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.364060Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:14.364115Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.364145Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:14.374476Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.385384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:14.385452Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.385484Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:14.385517Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:14.385557Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:14.395868Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPartitionTests::ConflictingTxIsAborted >> TPQTabletTests::Multiple_PQTablets_1 >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 15760, MsgBus: 23974 2026-01-07T17:54:38.917502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679599524189024:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:38.921016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:39.217107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:39.254388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-07T17:54:39.254491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:39.266479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:39.364510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:39.424120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:39.424147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:39.424153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:39.424258Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:39.444427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:39.843624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:39.851089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:39.918277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:39.929285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:40.060127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.219640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.282526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:41.968102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679612409092723:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.968214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.968661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679612409092733:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:41.968716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.239949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.267798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.297474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.336616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.371276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.413761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.451193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.500549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:42.586111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679616704060902:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.586199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.586397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679616704060907:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.586457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679616704060908:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.586708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:42.591309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:42.608215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679616704060911:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:42.691163Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679616704060962:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:43.917313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679599524189024:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:43.917389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\ ... alled at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.709740Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.836095Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:07.860632Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.941137Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:10.902417Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679738840493870:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:10.902533Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:10.902791Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679738840493879:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:10.902853Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:10.992578Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.031743Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.063269Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.098549Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.133749Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.177338Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.224193Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.293494Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:11.377068Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679743135462048:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:11.377174Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679743135462053:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:11.377180Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:11.377354Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679743135462055:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:11.377391Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:11.380526Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:11.392404Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679743135462056:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:11.487810Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679743135462108:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:11.827436Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679721660622843:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:11.827504Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"E-Size":"0","PlanNodeId":6,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":6}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":3,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node 
Type":"Filter"},{"PlanNodeId":11,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> TSchemeShardServerLess::StorageBilling [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback >> TPQTabletTests::Multiple_PQTablets_2 >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TKesusTest::TestAttachOutOfSequence >> TPQTabletTests::One_Tablet_For_All_Partitions >> KqpStats::MultiTxStatsFullScan [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:53:46.804208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:53:46.804346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:53:46.804481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:53:46.804526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:53:46.804560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:53:46.804629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:53:46.804713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:53:46.805727Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:53:46.807877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:53:46.902761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:53:46.902831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:46.918426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:53:46.918848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:53:46.920111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:53:46.931371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:53:46.933720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:53:46.944955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:53:46.951621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:53:46.964477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.964703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:53:46.973225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:53:46.973506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:53:46.973566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:53:46.973669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:53:46.974608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:53:47.162420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.163544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.163680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.163792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.163911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.163981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:53:47.164801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eBoard Send, to populator: [1:1107:3023], at schemeshard: 72075186233409549, txId: 107, path id: 32 2026-01-07T17:54:54.163095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-07T17:54:54.163160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2026-01-07T17:54:54.163264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-07T17:54:54.163317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2026-01-07T17:54:54.163371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-07T17:54:54.164761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-07T17:54:54.164879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-07T17:54:54.168954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-07T17:54:54.169076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 12 2026-01-07T17:54:54.169139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 6 2026-01-07T17:54:54.170411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-07T17:54:54.170534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-07T17:54:54.170567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-07T17:54:54.170595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], version: 18446744073709551615 2026-01-07T17:54:54.170651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-07T17:54:54.170739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is 
published: true 2026-01-07T17:54:54.179657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-07T17:54:54.179736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2026-01-07T17:54:54.180101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-07T17:54:54.180286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:54:54.180354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:54:54.180395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-07T17:54:54.180426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:54:54.180479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-07T17:54:54.180554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1302:3181] message: TxId: 107 2026-01-07T17:54:54.180606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-07T17:54:54.180637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-07T17:54:54.180692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-07T17:54:54.180791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 2 2026-01-07T17:54:54.182357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-07T17:54:54.183801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-07T17:54:54.185519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:54:54.185566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:3474:5305] TestWaitNotification: OK eventTxId 107 2026-01-07T17:54:54.206234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 1274 RawX2: 4294970457 } TabletId: 72075186233409552 State: 4 2026-01-07T17:54:54.206344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2026-01-07T17:54:54.208510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: 
SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2026-01-07T17:54:54.208618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2026-01-07T17:54:54.209052Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2026-01-07T17:54:54.211617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2026-01-07T17:54:54.211909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 1 2026-01-07T17:54:54.212643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2026-01-07T17:54:54.212690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 32], at schemeshard: 72075186233409549 2026-01-07T17:54:54.212782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2026-01-07T17:54:54.216068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72075186233409549:4 2026-01-07T17:54:54.216152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2026-01-07T17:54:54.216462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2026-01-07T17:54:54.332099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-07T17:54:54.332213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-07T17:54:54.332280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-07T17:54:54.332344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-07T17:54:54.332377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-07T17:54:54.332406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-07T17:54:54.332441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-07T17:54:54.332470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 
72075186233409549 2026-01-07T17:54:54.332501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-07T17:54:54.399194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:54:54.399349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:06:00.000000Z 2026-01-07T17:54:54.399443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:16.109374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:16.109507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:07:00.000000Z 2026-01-07T17:55:16.109576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> TestArrowBlockSplitter::SplitLargeBlock [GOOD] >> TestArrowBlockSplitter::SplitWithScalars [GOOD] >> TestArrowBlockSplitter::PassSmallBlock [GOOD] >> TestArrowBlockSplitter::CheckLargeRows >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TestArrowBlockSplitter::CheckLargeRows [GOOD] >> TKesusTest::TestAttachThenReRegister >> TestArrowBlockSplitter::CheckLargeScalarRows [GOOD] >> HashShuffle::BackPressureWithSpillingMulti [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::Limit_On_The_Number_Of_Transactons >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig |96.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestReleaseLockFailure |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingMulti [GOOD] |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 24251, MsgBus: 15634 2026-01-07T17:54:50.476183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679653311018109:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:50.476370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:50.745354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:50.745495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:50.776147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:50.805963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:50.811787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:50.819811Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679653311017990:2081] 1767808490467702 != 1767808490467705 2026-01-07T17:54:50.973825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:50.975492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:50.975517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:50.975526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:50.975612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:51.445312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:51.476166Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:51.501287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:54:51.619867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.769221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.842100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.638823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679666195921748:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.638967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.639343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679666195921757:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.639388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:53.950381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:53.984881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.024920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.055469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.091803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.125279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.161007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.211585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:54.310286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679670490889921:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.310381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.310663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679670490889926:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.310683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679670490889927:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.310731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:54.314200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:54.326894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679670490889930:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:54:54.423040Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679670490889981:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:55.471696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679653311018109:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:55.471754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:56.063509Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808496069, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 28553, MsgBus: 8276 2026-01-07T17:54:57.133994Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;acto ... you don't have access permissions } 2026-01-07T17:55:13.806761Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:13.888474Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.925392Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.961360Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.999559Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.038607Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.077380Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.120696Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.180631Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.268121Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679754288836729:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.268247Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.269296Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679754288836734:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.269371Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592679754288836735:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.269523Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.273278Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:14.285546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-07T17:55:14.286319Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679754288836738:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:14.382157Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679754288836789:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:14.938515Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679732813997535:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:14.938591Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:16.179456Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808516187, txId: 281474976710673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"WaitTimeUs":{"Count":1,"Max":1842,"Sum":1842,"History":[3,1842],"Min":1842},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,3,1048576],"Min":1048576},"Introspections":["1 tasks for a single\/sequential source 
scan"],"Tasks":1,"FinishedTasks":1,"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1767808516147,"Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"CpuTimeUs":{"Count":1,"Max":1035,"Sum":1035,"History":[0,875,3,1035],"Min":1035},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[3,144],"Min":144},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[3,144],"Min":144},"WaitTimeUs":{"Count":1,"Max":1433,"Sum":1433,"History":[3,1433],"Min":1433},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"UpdateTimeMs":2}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"WaitTimeUs":{"Count":1,"Max":2436,"Sum":2436,"History":[3,2436],"Min":2436},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[0,1048576,3,1048576],"Min":1048576},"Introspections":["1 minimum tasks for compute"],"InputBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":0,"BaseTimeMs":1767808516147,"CpuTimeUs":{"Count":1,"Max":612,"Sum":612,"History":[0,502,3,612],"Min":612},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"WaitTimeUs":{"Count":1,"Max":1842,"Sum":1842,"History":[3,1842],"Min":1842},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"UpdateTimeMs":2}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":215557,"ProcessCpuTimeUs":294,"Compilation":{"FromCache":false,"CpuTimeUs":173765,"DurationUs":188659}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.612,"A-Cpu":1.647,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-SelfCpu":1.035,"A-Cpu":1.035,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> Channels20::LocalChannelBackPressure [GOOD] >> Channels20::LocalChannelAsyncRead >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout >> TKesusTest::TestQuoterHDRRParametersValidation >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2026-01-07T17:55:18.099905Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.100053Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.118268Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.118761Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.156659Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.157561Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=9053682764139833215, session=0, seqNo=0) 2026-01-07T17:55:18.157871Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:18.177098Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=9053682764139833215, session=1) 2026-01-07T17:55:18.177512Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=5009600832782442836, session=0, seqNo=0) 2026-01-07T17:55:18.177652Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:18.191841Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=5009600832782442836, session=2) 2026-01-07T17:55:18.193256Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:18.193438Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:18.193588Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:18.209763Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-07T17:55:18.210175Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-07T17:55:18.210359Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-07T17:55:18.210454Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-07T17:55:18.222878Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=112) 2026-01-07T17:55:18.223354Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2026-01-07T17:55:18.223473Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-07T17:55:18.223686Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-07T17:55:18.223787Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2026-01-07T17:55:18.223857Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-07T17:55:18.223966Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-07T17:55:18.237726Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2026-01-07T17:55:18.237795Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2026-01-07T17:55:18.237832Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=223) 2026-01-07T17:55:18.238089Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=334, name="Lock2") 2026-01-07T17:55:18.238167Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2026-01-07T17:55:18.238212Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:55:18.250861Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=334) 2026-01-07T17:55:18.251460Z node 1 :KESUS_TABLET 
DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:166:2188], cookie=8323060912996923632, name="Lock1") 2026-01-07T17:55:18.251583Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:166:2188], cookie=8323060912996923632) 2026-01-07T17:55:18.252088Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:169:2191], cookie=9129010997228495686, name="Lock2") 2026-01-07T17:55:18.252154Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:169:2191], cookie=9129010997228495686) 2026-01-07T17:55:18.263630Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.263754Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.264109Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.264713Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.303521Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.303711Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:55:18.303768Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-07T17:55:18.304129Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:208:2221], cookie=16073304936510371318, name="Lock1") 2026-01-07T17:55:18.304232Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:208:2221], cookie=16073304936510371318) 2026-01-07T17:55:18.304804Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:216:2228], cookie=3067688823860208550, name="Lock2") 2026-01-07T17:55:18.304868Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:216:2228], cookie=3067688823860208550) 2026-01-07T17:55:18.736806Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.736918Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.754025Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.754300Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.768408Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.769187Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=8694246119351610234, session=0, seqNo=0) 2026-01-07T17:55:18.769336Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:18.792035Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=8694246119351610234, session=1) 2026-01-07T17:55:18.792368Z node 2 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=17987837645739254388, session=0, seqNo=0) 2026-01-07T17:55:18.792551Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:18.804668Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=17987837645739254388, session=2) 2026-01-07T17:55:18.806635Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:18.806848Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:18.806934Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:18.819029Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=111) 2026-01-07T17:55:18.819375Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-07T17:55:18.819523Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-07T17:55:18.819605Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-07T17:55:18.831700Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=112) 2026-01-07T17:55:18.832097Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=333, session=1, semaphore="Lock1" count=1) 2026-01-07T17:55:18.832323Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-07T17:55:18.832388Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-07T17:55:18.832486Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-07T17:55:18.844582Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=333) 2026-01-07T17:55:18.844666Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=222) 2026-01-07T17:55:18.844702Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=223) 2026-01-07T17:55:18.845251Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=2469600543194276028, name="Lock1") 2026-01-07T17:55:18.845350Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=2469600543194276028) 
2026-01-07T17:55:18.845804Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], cookie=9903244578724868637, name="Lock2") 2026-01-07T17:55:18.845873Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=9903244578724868637) 2026-01-07T17:55:18.846265Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2192], cookie=13693829626645469309, name="Lock1") 2026-01-07T17:55:18.846332Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2192], cookie=13693829626645469309) 2026-01-07T17:55:18.846755Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=8698519873781707439, name="Lock2") 2026-01-07T17:55:18.846847Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=8698519873781707439) 2026-01-07T17:55:18.847087Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=444, session=2, semaphore="Lock2" count=1) 2026-01-07T17:55:18.847217Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:55:18.859302Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=444) 2026-01-07T17:55:18.859860Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:178:2200], cookie=5510127351939337951, name="Lock2") 2026-01-07T17:55:18.859950Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:178:2200], cookie=5510127351939337951) 2026-01-07T17:55:18.860370Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:181:2203], cookie=10208903998591230530, name="Lock2") 2026-01-07T17:55:18.860433Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:181:2203], cookie=10208903998591230530) 2026-01-07T17:55:18.874892Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.875002Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.875457Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.875998Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.934076Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.934264Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:18.934318Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-07T17:55:18.934347Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-07T17:55:18.934374Z 
node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-07T17:55:18.934736Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:220:2233], cookie=1523131113286264209, name="Lock1") 2026-01-07T17:55:18.934840Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:220:2233], cookie=1523131113286264209) 2026-01-07T17:55:18.935465Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:228:2240], cookie=7152699765740157222, name="Lock2") 2026-01-07T17:55:18.935546Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:228:2240], cookie=7152699765740157222) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2026-01-07T17:55:17.307584Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:17.307713Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:17.327469Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:17.327910Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:17.366536Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:17.367305Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=9898918398861875863, session=0, seqNo=222) 2026-01-07T17:55:17.367461Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:17.379338Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=9898918398861875863, session=1) 2026-01-07T17:55:17.379625Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=12574746046945607032, session=1, seqNo=111) 2026-01-07T17:55:17.391991Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=12574746046945607032, session=1) 2026-01-07T17:55:17.741064Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:17.741184Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:17.757684Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:17.757955Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:17.775096Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:17.775678Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=111, session=0, seqNo=42) 2026-01-07T17:55:17.775852Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 1 2026-01-07T17:55:17.776014Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=222, session=1, seqNo=41) 2026-01-07T17:55:17.800506Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=111, session=1) 2026-01-07T17:55:17.800590Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=222, session=1) 2026-01-07T17:55:18.150684Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.150800Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.164297Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.164397Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.205611Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.206091Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=17581009396534401734, session=0, seqNo=0) 2026-01-07T17:55:18.206219Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:18.223280Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=17581009396534401734, session=1) 2026-01-07T17:55:18.224949Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=15481450571367456640) 2026-01-07T17:55:18.225043Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=15481450571367456640) 2026-01-07T17:55:18.568538Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.568646Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.586736Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.587027Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.610996Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.019393Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.019502Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:19.036462Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:19.036624Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:19.072142Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.072746Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=7542053693239663002, session=0, seqNo=0) 2026-01-07T17:55:19.072943Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:19.085254Z node 5 
:KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=7542053693239663002, session=1) 2026-01-07T17:55:19.085600Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:19.085740Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:19.085825Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:19.098211Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-01-07T17:55:19.099212Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=834321788759652174, name="Sem1", limit=42) 2026-01-07T17:55:19.099394Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-07T17:55:19.111677Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=834321788759652174) 2026-01-07T17:55:19.112263Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2178], cookie=14294890031786876798, name="Sem1", limit=42) 2026-01-07T17:55:19.124902Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2178], cookie=14294890031786876798) 2026-01-07T17:55:19.125618Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2183], cookie=4053864218663960415, name="Sem1", limit=51) 2026-01-07T17:55:19.138269Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2183], cookie=4053864218663960415) 2026-01-07T17:55:19.138962Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2188], cookie=168559667134334799, name="Lock1", limit=42) 2026-01-07T17:55:19.151589Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2188], cookie=168559667134334799) 2026-01-07T17:55:19.152251Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:171:2193], cookie=3867718407824121596, name="Lock1", limit=18446744073709551615) 2026-01-07T17:55:19.164789Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:171:2193], cookie=3867718407824121596) 2026-01-07T17:55:19.165466Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:176:2198], cookie=10249907394384326459, name="Sem1") 2026-01-07T17:55:19.165576Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:176:2198], cookie=10249907394384326459) 2026-01-07T17:55:19.166133Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:179:2201], cookie=2625906216683316234, name="Sem2") 
2026-01-07T17:55:19.166211Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:179:2201], cookie=2625906216683316234) 2026-01-07T17:55:19.179857Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.179971Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:19.180468Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:19.181055Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:19.228797Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.228970Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:19.229386Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:218:2231], cookie=9623423379932058079, name="Sem1") 2026-01-07T17:55:19.229471Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:218:2231], cookie=9623423379932058079) 2026-01-07T17:55:19.230091Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:225:2237], cookie=7351669811584951051, name="Sem2") 2026-01-07T17:55:19.230173Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:225:2237], cookie=7351669811584951051) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts >> KqpQuery::CreateAsSelectView [GOOD] >> KqpQuery::CreateTableAs_MkDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] Test command err: 2026-01-07T17:55:16.307241Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:16.382120Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:55:16.385918Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:55:16.386269Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:16.386339Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:16.386379Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:55:16.386440Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:55:16.386493Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:16.386556Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:16.418672Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2026-01-07T17:55:16.418867Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:55:16.438103Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:55:16.440904Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:55:16.441095Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:16.442077Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-07T17:55:16.442321Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:55:16.442768Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:55:16.443206Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2026-01-07T17:55:16.444114Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:55:16.444160Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-07T17:55:16.444198Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2026-01-07T17:55:16.444247Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:55:16.444309Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:55:16.444899Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:16.445349Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:55:16.445388Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:16.445423Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:16.445461Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:16.445517Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:55:16.445552Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:16.445602Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:16.445680Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-07T17:55:16.445718Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:55:16.445748Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:55:16.445773Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-07T17:55:16.445794Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-07T17:55:16.445820Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:55:16.445850Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:55:16.445891Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:16.446152Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:16.446203Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:55:16.446269Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:55:16.446461Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:55:16.446662Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:16.449266Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:16.449383Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:16.449432Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:16.449481Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:16.449514Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:16.449557Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:16.449597Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:16.449644Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:16.449973Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2026-01-07T17:55:16.450587Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2026-01-07T17:55:16.451432Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-07T17:55:16.451512Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-07T17:55:16.451602Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-07T17:55:16.451647Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-07T17:55:16.451683Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-07T17:55:16.451721Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-07T17:55:16.451770Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... T topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:55:19.775562Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:55:19.775590Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. 
Count 0 2026-01-07T17:55:19.775615Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:19.775648Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.775671Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:19.775698Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.775719Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:19.775773Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:55:19.775897Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:55:19.776028Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|fedde287-caef68f6-f22bfded-4cbf4a2d_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-07T17:55:19.776067Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:19.776101Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:19.776139Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:19.776168Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:19.776198Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:19.776239Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:19.776266Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-07T17:55:19.776305Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:19.776357Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-07T17:55:19.776414Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2026-01-07T17:55:19.776868Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-07T17:55:19.776909Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-07T17:55:19.777245Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-07T17:55:19.777688Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-07T17:55:19.777735Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from D0000100001(+) to D0000100002(-) 2026-01-07T17:55:19.778066Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:19.778204Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:55:19.778242Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-07T17:55:19.778279Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.778315Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:19.778354Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.778388Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-07T17:55:19.778429Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-07T17:55:19.778675Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 0 CachedBlobs 0 2026-01-07T17:55:19.778759Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-07T17:55:19.778814Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-07T17:55:19.778852Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-07T17:55:19.778919Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T17:55:19.780828Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T17:55:19.791266Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:19.823526Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:19.834099Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:19.834170Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.834223Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:19.834267Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.834301Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:19.845766Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:55:19.845855Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.845892Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:19.845932Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:19.845964Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-07T17:55:19.975297Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:19.985952Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] No data for blobs compaction 2026-01-07T17:55:20.263141Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:20.284096Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:20.284174Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:20.284210Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:20.284253Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:20.284285Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:20.294654Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-07T17:55:20.294720Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:20.294748Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:20.294805Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:20.294835Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-07T17:55:20.305162Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:20.325914Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> Channels20::IcChannelLateBinding [GOOD] >> Channels20::IcChannelAsyncRead >> BSCMovePDisk::PDiskMove_Block42 >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] >> TKesusTest::TestSemaphoreSessionFailures [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 17108002208446079706 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2026-01-07T17:55:18.799081Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:18.799198Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:18.814703Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:18.815033Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:18.849515Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:18.850073Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=10933783751048159250, session=0, seqNo=0) 
2026-01-07T17:55:18.850214Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:18.862289Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=10933783751048159250, session=1) 2026-01-07T17:55:18.862563Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=15919085437879917274, session=0, seqNo=0) 2026-01-07T17:55:18.862703Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:18.877887Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=15919085437879917274, session=2) 2026-01-07T17:55:18.878273Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=111, name="Lock1") 2026-01-07T17:55:18.893671Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=111) 2026-01-07T17:55:18.894029Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:18.894215Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:18.894320Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:18.906696Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-07T17:55:18.907012Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2026-01-07T17:55:18.919460Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2026-01-07T17:55:19.370099Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.370261Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:19.387271Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:19.387502Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:19.401476Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.402063Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=10610051871745481320, session=0, seqNo=0) 2026-01-07T17:55:19.402228Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:19.429731Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=10610051871745481320, session=1) 2026-01-07T17:55:19.430068Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=8701432455484343542, session=0, seqNo=0) 2026-01-07T17:55:19.430183Z 
node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:19.442275Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=8701432455484343542, session=2) 2026-01-07T17:55:19.442800Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:148:2170], cookie=943511763948384588, name="Sem1", limit=1) 2026-01-07T17:55:19.442932Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:55:19.455120Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:148:2170], cookie=943511763948384588) 2026-01-07T17:55:19.455460Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-07T17:55:19.455588Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-07T17:55:19.455741Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-07T17:55:19.467931Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=111) 2026-01-07T17:55:19.468013Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=222) 2026-01-07T17:55:19.468516Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=15553067667173344058, name="Sem1") 2026-01-07T17:55:19.468608Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=15553067667173344058) 2026-01-07T17:55:19.469033Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2181], cookie=5880280380210865197, name="Sem1") 2026-01-07T17:55:19.469095Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2181], cookie=5880280380210865197) 2026-01-07T17:55:19.469314Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:137:2161], cookie=333, name="Sem1") 2026-01-07T17:55:19.469426Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2026-01-07T17:55:19.481416Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:137:2161], cookie=333) 2026-01-07T17:55:19.481969Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=2813534143499059895, name="Sem1") 2026-01-07T17:55:19.482046Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=2813534143499059895) 2026-01-07T17:55:19.482413Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], 
cookie=4896427187765241745, name="Sem1") 2026-01-07T17:55:19.482465Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=4896427187765241745) 2026-01-07T17:55:19.482701Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:137:2161], cookie=444, name="Sem1") 2026-01-07T17:55:19.482846Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-07T17:55:19.494762Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:137:2161], cookie=444) 2026-01-07T17:55:19.495341Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:172:2194], cookie=10509920079571120510, name="Sem1") 2026-01-07T17:55:19.495435Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:172:2194], cookie=10509920079571120510) 2026-01-07T17:55:19.495871Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:175:2197], cookie=12424717598739426958, name="Sem1") 2026-01-07T17:55:19.495921Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:175:2197], cookie=12424717598739426958) 2026-01-07T17:55:19.781063Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.781180Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:19.797984Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:19.798092Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:19.833132Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.833560Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:136:2161], cookie=11014239004058127288, name="Sem1", limit=1) 2026-01-07T17:55:19.833737Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:55:19.847254Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:136:2161], cookie=11014239004058127288) 2026-01-07T17:55:19.847794Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:146:2168], cookie=8257195820508425886, name="Sem2", limit=1) 2026-01-07T17:55:19.847954Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2026-01-07T17:55:19.861678Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:146:2168], cookie=8257195820508425886) 2026-01-07T17:55:19.862363Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2173], cookie=15974697555645436005, name="Sem1") 2026-01-07T17:55:19.862469Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2173], cookie=15974697555645436005) 2026-01-07T17:55:19.862978Z node 3 
:KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2176], cookie=5811950046312607392, name="Sem2") 2026-01-07T17:55:19.863049Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2176], cookie=5811950046312607392) 2026-01-07T17:55:19.876352Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.876460Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::E ... xSemaphoreCreate::Complete (sender=[4:249:2270], cookie=14333886650652525571) 2026-01-07T17:55:20.797028Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-07T17:55:20.797229Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2026-01-07T17:55:20.811147Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-07T17:55:20.811719Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-07T17:55:20.824045Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-07T17:55:20.824504Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Sem1") 2026-01-07T17:55:20.824620Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2026-01-07T17:55:20.847281Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-07T17:55:20.847938Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=444, session=2, semaphore="Sem1" count=1) 2026-01-07T17:55:20.860418Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=444) 2026-01-07T17:55:20.861178Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=555, name="Sem1") 2026-01-07T17:55:20.861300Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2026-01-07T17:55:20.861379Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2026-01-07T17:55:20.875235Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=555) 2026-01-07T17:55:21.296718Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:21.296842Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:21.317958Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:21.318336Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] 
TTxInit::Execute 2026-01-07T17:55:21.344464Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:21.345116Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=10055442909113557658, session=0, seqNo=0) 2026-01-07T17:55:21.345297Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:21.357685Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=10055442909113557658, session=1) 2026-01-07T17:55:21.358048Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=112, name="Sem1", limit=5) 2026-01-07T17:55:21.358229Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:55:21.377806Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=112) 2026-01-07T17:55:21.378226Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=113, name="Sem1") 2026-01-07T17:55:21.393782Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=113) 2026-01-07T17:55:21.394181Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=114, name="Sem1", force=0) 2026-01-07T17:55:21.394315Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-07T17:55:21.407072Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=114) 2026-01-07T17:55:21.407427Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:137:2161], cookie=14369320164416095226 2026-01-07T17:55:21.407747Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=115, name="Sem1", limit=5) 2026-01-07T17:55:21.421850Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=115) 2026-01-07T17:55:21.422260Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=116, name="Sem1") 2026-01-07T17:55:21.434806Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=116) 2026-01-07T17:55:21.435209Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=117, name="Sem1", force=0) 2026-01-07T17:55:21.447505Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=117) 2026-01-07T17:55:21.447911Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=118, session=1, semaphore="Sem1" count=1) 2026-01-07T17:55:21.460264Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete 
(sender=[5:137:2161], cookie=118) 2026-01-07T17:55:21.460643Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=119, name="Sem1") 2026-01-07T17:55:21.473057Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=119) 2026-01-07T17:55:21.473436Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=120, name="Sem1") 2026-01-07T17:55:21.473534Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=120) 2026-01-07T17:55:21.473812Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:137:2161], cookie=1825087701320719747, session=1) 2026-01-07T17:55:21.473915Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-07T17:55:21.486127Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:137:2161], cookie=1825087701320719747) 2026-01-07T17:55:21.486488Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=121, name="Sem1", limit=5) 2026-01-07T17:55:21.505711Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=121) 2026-01-07T17:55:21.506117Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=122, name="Sem1") 2026-01-07T17:55:21.519067Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=122) 2026-01-07T17:55:21.519433Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=123, name="Sem1", force=0) 2026-01-07T17:55:21.531996Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=123) 2026-01-07T17:55:21.532359Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=124, session=1, semaphore="Sem1" count=1) 2026-01-07T17:55:21.544767Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=124) 2026-01-07T17:55:21.545132Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=125, name="Sem1") 2026-01-07T17:55:21.570948Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=125) 2026-01-07T17:55:21.571282Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=126, name="Sem1") 2026-01-07T17:55:21.571377Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=126) 2026-01-07T17:55:21.572037Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=127, name="Sem1", limit=5) 
2026-01-07T17:55:21.572122Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=127) 2026-01-07T17:55:21.572359Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=128, name="Sem1") 2026-01-07T17:55:21.572425Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=128) 2026-01-07T17:55:21.572654Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=129, name="Sem1", force=0) 2026-01-07T17:55:21.572717Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=129) 2026-01-07T17:55:21.572998Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=130, session=1, semaphore="Sem1" count=1) 2026-01-07T17:55:21.573070Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=130) 2026-01-07T17:55:21.573328Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=131, name="Sem1") 2026-01-07T17:55:21.573396Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=131) 2026-01-07T17:55:21.573628Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=132, name="Sem1") 2026-01-07T17:55:21.573691Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=132) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TKesusTest::TestKesusConfig >> KqpExplain::FullOuterJoin [GOOD] >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2026-01-07T17:50:36.886561Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:36.915144Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:36.915428Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# 
"SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:36.916354Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:36.916742Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:36.917936Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:36.918005Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:36.919058Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:53:2077] ControllerId# 72057594037932033 2026-01-07T17:50:36.919099Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:36.919216Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:36.919372Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:36.933472Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:36.933550Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:36.936010Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.936204Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.936391Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.936553Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.936698Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:65:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.936865Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:66:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.937033Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:52:2076] Create Queue# [1:67:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.937062Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:36.937145Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:53:2077] 2026-01-07T17:50:36.937180Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:53:2077] 2026-01-07T17:50:36.937251Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:36.937300Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:36.938319Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:36.938401Z node 2 
:BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:36.941586Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:36.941734Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:36.942109Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:36.942328Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:36.943224Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:78:2076] ControllerId# 72057594037932033 2026-01-07T17:50:36.943255Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:36.943313Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:36.943431Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:36.954634Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:36.954715Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:36.956504Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:84:2080] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.956637Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:85:2081] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.956776Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:86:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.956901Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:87:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.957037Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:88:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.957152Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:89:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.957270Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:77:2075] Create Queue# [2:90:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.957296Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:36.957359Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:78:2076] 2026-01-07T17:50:36.957388Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup 
[2:78:2076] 2026-01-07T17:50:36.957458Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:36.957501Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:36.957927Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:36.958149Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:36.969143Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:53:2077] 2026-01-07T17:50:36.969307Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:36.997025Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:36.997101Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:43:2064] 2026-01-07T17:50:36.997131Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:43:2064] 2026-01-07T17:50:37.005225Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.009889Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:78:2076] 2026-01-07T17:50:37.009968Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.010286Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.010335Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.011654Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.014066Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:97:2093] 2026-01-07T17:50:37.014113Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:97:2093] 2026-01-07T17:50:37.014403Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.014497Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:37.014952Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.015270Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward 
TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.015384Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.015473Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:97:2093] 2026-01-07T17:50:37.015555Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageC ... The tablet Unknown.65553.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876207Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65552.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876223Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65551.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876238Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65550.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876254Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65549.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876270Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65548.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876286Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65547.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876302Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65546.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876319Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65545.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876335Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65544.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876351Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65543.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876366Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65542.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876381Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65541.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876396Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65540.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876410Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65539.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876426Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65538.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876441Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65537.Leader.1 is not ready for anything State:Unknown 
VolatileState:Stopped 2026-01-07T17:52:55.876456Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65536.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-07T17:52:55.876570Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} queued, type NKikimr::NHive::TTxProcessPendingOperations 2026-01-07T17:52:55.876619Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.876683Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:55.876732Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:55.876824Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:514:2351] 2026-01-07T17:52:55.876847Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:514:2351] 2026-01-07T17:52:55.876960Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594037927937 Active! Generation: 3, Type: Hive started in 21msec Marker# TSYS24 2026-01-07T17:52:55.876987Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72057594037927937] Activate 2026-01-07T17:52:55.877098Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:514:2351] 2026-01-07T17:52:55.877160Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [1:146:2124] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:52:55.877193Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [1:146:2124] 2026-01-07T17:52:55.877293Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [1:97:2093] 2026-01-07T17:52:55.877324Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [1:97:2093] 2026-01-07T17:52:55.877352Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [1:97:2093] 2026-01-07T17:52:55.877392Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [1:96:2093] EventType# 268959744 2026-01-07T17:52:55.877423Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [1:514:2351] 2026-01-07T17:52:55.877453Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [1:514:2351] 2026-01-07T17:52:55.877544Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [1:514:2351] 2026-01-07T17:52:55.877634Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [1:514:2351] 2026-01-07T17:52:55.877664Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [1:514:2351] 2026-01-07T17:52:55.877681Z node 1 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [1:514:2351] 2026-01-07T17:52:55.877706Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [1:431:2280] EventType# 268637702 2026-01-07T17:52:55.877851Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [2:103:2089] 2026-01-07T17:52:55.877876Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [2:103:2089] 2026-01-07T17:52:55.877890Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [2:103:2089] 2026-01-07T17:52:55.877949Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [2:103:2089] 2026-01-07T17:52:55.878045Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-01-07T17:52:55.878088Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.878229Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{22, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:55.878270Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:55.878419Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-07T17:52:55.878452Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.878575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:55.878651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:55.878708Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{10, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:52:55.878775Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:55.878887Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [2:102:2089] EventType# 268959744 2026-01-07T17:52:55.879029Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:52:55.879063Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.879112Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:55.879151Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, 
NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:55.879225Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-07T17:52:55.879249Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.879300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:52:55.879339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:52:55.879373Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{11, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-07T17:52:55.879396Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:52:55.879485Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-07T17:52:55.879505Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:52:55.879531Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{12, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:52:55.879551Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} Took 49.32055 seconds >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestAcquireUpgrade |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> Channels20::LocalChannelAsyncRead [GOOD] >> Channels20::IcChannelTrivial >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> KqpQuery::QueryExplain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28651, MsgBus: 12503 2026-01-07T17:54:47.241656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679641482446876:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:47.242274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:47.287754Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:47.539616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:47.571343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:47.571490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:47.583261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:47.721728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:47.726434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:47.726461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:47.726473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:47.726570Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:47.815089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:48.142554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:48.149309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:48.202828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.261991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:48.347639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.504674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:48.572479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.386461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679654367350522:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.386582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.387005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679654367350532:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.387041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.736985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.817474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.851227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.884073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:50.926468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.005515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.048000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.118120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:51.188395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679658662318706:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.188460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.188591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679658662318711:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.188623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679658662318713:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.188756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:51.192540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:51.204545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679658662318715:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:51.290180Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679658662318768:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:52.240970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679641482446876:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:52.241049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:15.649674Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:15.731226Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:15.731330Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:15.736475Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:15.797439Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:15.797465Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:15.797472Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:15.797553Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:15.834021Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:16.309728Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:16.329196Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:16.395327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:16.558769Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:16.563490Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:16.637034Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.545947Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679776523942241:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:19.546056Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:19.546792Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679776523942250:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:19.546864Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:19.636975Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.681056Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.716321Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.755076Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.795246Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.833391Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.875251Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.925914Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.014568Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679780818910423:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.014666Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679780818910428:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.014693Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.014942Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679780818910430:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.014985Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.020822Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:20.032723Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679780818910431:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:20.106360Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679780818910483:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:20.544864Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679759344071227:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:20.544962Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:21.947570Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.267962Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.319027Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2326, MsgBus: 12587 2026-01-07T17:54:43.350960Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679622350557246:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:43.351037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:43.404788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:43.652458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:43.652753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:43.656488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-07T17:54:43.745965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:43.766159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:43.869623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:43.869652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:43.869658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:43.869744Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:43.964045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:44.308580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:44.321782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:44.331742Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:44.373505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.557863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.710589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:44.789289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.505163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679635235460977:2399], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.505278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.505935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679635235460987:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.506003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:46.822806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.858055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.896780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.931917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.959755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.996666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.035726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.113679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.195047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679639530429151:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.195140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.195439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679639530429157:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.195490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.195543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679639530429156:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:47.199407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:47.209555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679639530429160:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:47.302046Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679639530429211:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:48.351406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679622350557246:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:48.351466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7269, MsgBus: 20905 2026-01-07T17:54:50.079547Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists. ... lterResource ok# false data# peer# 2026-01-07T17:55:23.217050Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d82b80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-07T17:55:23.217242Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d83980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-07T17:55:23.217466Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d8be80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-07T17:55:23.217667Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d84080] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-07T17:55:23.217872Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d71380] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-07T17:55:23.218074Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d39380] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-07T17:55:23.218260Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d70c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-07T17:55:23.218422Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cff080] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-07T17:55:23.218590Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5dabd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-07T17:55:23.218724Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d69580] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-07T17:55:23.218874Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6a380] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-07T17:55:23.219000Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d8ef80] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-07T17:55:23.219141Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d4f180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-07T17:55:23.219276Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d3a180] received request Name# 
Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-01-07T17:55:23.219425Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d35480] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-07T17:55:23.219549Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d3cb80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-07T17:55:23.219693Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6fe80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-07T17:55:23.219813Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d50680] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-07T17:55:23.219933Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6f080] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-07T17:55:23.220065Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d70580] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-07T17:55:23.220193Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6cd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-07T17:55:23.220317Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6e280] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-07T17:55:23.220453Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6c680] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-07T17:55:23.220580Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ce2980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-07T17:55:23.220730Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cdb980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-07T17:55:23.220875Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd8180] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-07T17:55:23.221043Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd4980] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-07T17:55:23.221235Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cbd680] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-07T17:55:23.221232Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd1180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-07T17:55:23.221410Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cb3c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-07T17:55:23.221428Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cb4380] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-01-07T17:55:23.221590Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ce1480] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-07T17:55:23.221616Z node 5 :GRPC_SERVER 
DEBUG: logger.cpp:36: [0x7cd5e5d68e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-07T17:55:23.221773Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6aa80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-07T17:55:23.221806Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5d6b880] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-07T17:55:23.221952Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5c9fa80] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-07T17:55:23.221987Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ca1680] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-07T17:55:23.222145Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5bff180] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-07T17:55:23.222193Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cb9e80] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-07T17:55:23.222343Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cbb380] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-07T17:55:23.222383Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cbeb80] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-07T17:55:23.222564Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd0380] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-07T17:55:23.222568Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd1f80] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-07T17:55:23.222769Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ccee80] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-07T17:55:23.222805Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5db9d80] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-07T17:55:23.222891Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cb2780] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-07T17:55:23.222983Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cbac80] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-07T17:55:23.223091Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd2d80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-07T17:55:23.223190Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd5780] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-07T17:55:23.223271Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd3480] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-07T17:55:23.223360Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd5e80] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-01-07T17:55:23.223472Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd8f80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-07T17:55:23.223541Z node 5 
:GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd7380] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-07T17:55:23.223655Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cd9d80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-07T17:55:23.223713Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cdc780] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-07T17:55:23.223833Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cdab80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-07T17:55:23.223872Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cddc80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-07T17:55:23.224030Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cdce80] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-07T17:55:23.224030Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5cdff80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-07T17:55:23.224209Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ceb580] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-07T17:55:23.224225Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ce0d80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-07T17:55:23.224400Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cd5e5ce3780] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPartitionTests::UserActCount [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] >> TPartitionTests::TooManyImmediateTxs >> KqpQuery::CreateTableAs_MkDir [GOOD] >> KqpPg::CreateTableSerialColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2026-01-07T17:55:14.598080Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.670486Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:14.670569Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:14.670648Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:14.670715Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:14.692913Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 2026-01-07T17:55:14.695147Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:55:14.000000Z 2026-01-07T17:55:14.695406Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2026-01-07T17:55:14.716846Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.760206Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.781326Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.791935Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.836054Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.881224Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.913893Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\357\344\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:55:15.066617Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\357\344\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:55:15.090745Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\357\344\314\27138\001" StorageChannel: INLINE } 
2026-01-07T17:55:15.421330Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.465319Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:15.465390Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:15.465439Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:15.465494Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:15.487014Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:182:2194] 2026-01-07T17:55:15.488995Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-07T17:55:15.000000Z 2026-01-07T17:55:15.489274Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:182:2194] 2026-01-07T17:55:15.501113Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.533982Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.555125Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.586440Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.617927Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.649109Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.721374Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.752553Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\367\344\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: 
"\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\367\344\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\367\344\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:55:15.909196Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\367\344\314\27138\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\367\344\314\27138\001" StorageChannel: INLINE } 2026-01-07T17:55:16.263166Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:16.312498Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:16.312566Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:16.312615Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:16.312669Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:16.685738Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candida ... 
RSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:25.792644Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:25.792682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:25.792711Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:25.792743Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:25.792769Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:25.814583Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:55:25.814762Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-07T17:55:25.814830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:25.814876Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:25.814929Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 0 affect SourceId src1 2026-01-07T17:55:25.815001Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:25.815043Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:25.815081Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:25.815123Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:25.815157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait 1st KV request Wait kv request 2026-01-07T17:55:25.815302Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-07T17:55:25.815349Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:25.815396Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:25.815436Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:25.815472Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:25.815520Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:55:25.815562Z node 6 
:PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-07T17:55:25.815616Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2026-01-07T17:55:25.815671Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-07T17:55:25.815716Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:25.815750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:25.815791Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Wait tx committed for tx 0 2026-01-07T17:55:25.816139Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:25.827213Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:55:25.827300Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:55:25.827414Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-07T17:55:25.827464Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:25.827502Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:25.827552Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:25.827580Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:25.827614Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 3, PendingWrites: 0 2026-01-07T17:55:25.827651Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:25.827695Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:25.827772Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. 
Offset: 20 2026-01-07T17:55:25.827827Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:25.827857Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 30 2026-01-07T17:55:25.827883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:25.827974Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2026-01-07T17:55:25.829139Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2026-01-07T17:55:25.829225Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 3 2026-01-07T17:55:25.829265Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (3) 2026-01-07T17:55:25.829316Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:25.829889Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][0][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 1 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000? size 70 WTime 10239 2026-01-07T17:55:25.830125Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 3 Got KV request Got KV request Wait 2nd KV request Wait kv request 2026-01-07T17:55:25.830368Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:25.851479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:55:25.851579Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:55:25.851671Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:25.851723Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-07T17:55:25.851792Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 1 is already written 2026-01-07T17:55:25.851828Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-07T17:55:25.851863Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 1 is already written 2026-01-07T17:55:25.851899Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-07T17:55:25.851933Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2026-01-07T17:55:25.852124Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:25.852162Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:25.852198Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:25.852242Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:25.852277Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:25.852330Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> BSCMovePDisk::PDiskMove_Block42 [GOOD] >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit >> KqpLimits::DataShardReplySizeExceeded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: Trying to start YDB, gRPC: 25880, MsgBus: 3796 2026-01-07T17:54:46.338124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679633469461026:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:46.338200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:46.565849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:46.574400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:46.574499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-07T17:54:46.628493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:46.655945Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:46.657115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679633469461001:2081] 1767808486337357 != 1767808486337360 2026-01-07T17:54:46.716581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:46.716600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:46.716606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:46.716675Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:46.762536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:47.146200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:47.217821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.346620Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:47.360919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.529172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:47.601559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.223063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679646354364761:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.223151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.223360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679646354364771:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.223415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.528313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.559368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.593654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.625767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.659900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.747342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.812676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.862848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:49.936874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679646354365652:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.936951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.937170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679646354365657:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.937210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679646354365658:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.937247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:49.940394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:49.954114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679646354365661:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:50.021768Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679650649333008:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:51.338389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679633469461026:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:51.338442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:51.449071Z node 1 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cdbcbfa1d80] finished request Name# Ydb.Table.V1.TableService/DeleteSession ok# true peer# ipv6:%5B::1%5D:41634 2026-01-07T17:54:51.449278Z node 1 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cdbcc0cfb80] finished request Name# Ydb.Table.V1.TableService/CreateSession ok# true peer# ipv4:12 ... : ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.084463Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.220315Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:19.228840Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:19.297676Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:21.783857Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679786533088765:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.783945Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.784228Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679786533088774:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.784297Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.905512Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:21.941369Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:21.974975Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.006015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.042316Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.080328Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.124401Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.178809Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:22.268126Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679790828056944:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:22.268226Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:22.268546Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679790828056949:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:22.268594Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679790828056950:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:22.268733Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:22.272592Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:22.285338Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679790828056953:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:22.348476Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679790828057004:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:23.204227Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679773648185047:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:23.204316Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:45" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 '('('"ItemsLimit" $3) '('"Sequential" '1) '('"PointPrefixLen" '1))) (let $5 (Uint32 '1)) (let $6 (KqpRowsSourceSettings $1 $2 $4 '((KqlKeyExc $5 (String '"Name")) (KqlKeyInc $5)))) (let $7 (OptionalType (DataType 'String))) (let $8 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $7) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $7))) (let $9 '('('"_logical_id" '715) '('"_id" '"e454bff7-1a0e45d-78163755-80ee8549") '('"_partition_mode" '"single") '('"_wide_channels" $8))) (let $10 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $6)) (lambda '($14) (block '( (let $15 (lambda '($16) (Member $16 '"Amount") (Member $16 '"Comment") (Member $16 '"Group") (Member $16 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $14) $3) $15))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '"0"))) (let $12 (DqPhyStage '($11) (lambda '($17) (FromFlow (NarrowMap (Take (ToFlow $17) $3) (lambda '($18 $19 $20 $21) (AsStruct '('"Amount" $18) '('"Comment" $19) '('"Group" $20) '('"Name" $21)))))) '('('"_logical_id" '728) '('"_id" '"de96b40-261e00aa-661d6865-ad69e364")))) (let $13 (DqCnResult (TDqOutput $12 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($10 $12) '($13) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $8) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, 
+∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpPg::TypeCoercionBulkUpsert >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 11814806962095092716 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> KqpPg::InsertNoTargetColumns_Simple+useSink >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 471173929199117614 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2026-01-07T17:55:23.598484Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:23.598611Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:23.616594Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:23.616966Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:23.655526Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:23.655891Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:136:2161], cookie=10906277237461894093, path="/foo/bar/baz") 2026-01-07T17:55:23.670646Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:136:2161], cookie=10906277237461894093, status=SUCCESS) 2026-01-07T17:55:23.671256Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:146:2168], cookie=17035860746230644109) 2026-01-07T17:55:23.683843Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:146:2168], cookie=17035860746230644109) 2026-01-07T17:55:23.684491Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:151:2173], cookie=13328605532236393284, path="/foo/bar/baz") 2026-01-07T17:55:23.697924Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:151:2173], cookie=13328605532236393284, status=SUCCESS) 2026-01-07T17:55:23.698545Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] 
TTxConfigGet::Execute (sender=[1:156:2178], cookie=14112423657806394591) 2026-01-07T17:55:23.710828Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:156:2178], cookie=14112423657806394591) 2026-01-07T17:55:23.724169Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:23.724293Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:23.724744Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:23.725487Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:23.752328Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:23.752724Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:197:2210], cookie=12673808982082441933) 2026-01-07T17:55:23.787148Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:197:2210], cookie=12673808982082441933) 2026-01-07T17:55:23.787726Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:205:2217], cookie=3826632263308458009, path="/foo/bar/baz") 2026-01-07T17:55:23.800441Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:205:2217], cookie=3826632263308458009, status=SUCCESS) 2026-01-07T17:55:23.800994Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:210:2222], cookie=5802184060332247937, path="/foo/bar/baz") 2026-01-07T17:55:23.801075Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:210:2222], cookie=5802184060332247937, status=PRECONDITION_FAILED) 2026-01-07T17:55:24.210924Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.211046Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.230889Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.231209Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.245325Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.245758Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:137:2161], cookie=9047231087677617717, name="Lock1") 2026-01-07T17:55:24.245853Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:137:2161], cookie=9047231087677617717) 2026-01-07T17:55:24.613800Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.613902Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.631659Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.631773Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.665887Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.666388Z node 3 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=16698023632766934387, session=0, seqNo=0) 2026-01-07T17:55:24.666533Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:24.678459Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=16698023632766934387, session=1) 2026-01-07T17:55:24.678829Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:24.678982Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:24.679071Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:24.691493Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-07T17:55:24.692026Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2170], cookie=18279592069535253140, name="Lock1", force=0) 2026-01-07T17:55:24.705573Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2170], cookie=18279592069535253140) 2026-01-07T17:55:24.706109Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:153:2175], cookie=18210154971501537580, name="Sem1", force=0) 2026-01-07T17:55:24.722826Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:153:2175], cookie=18210154971501537580) 2026-01-07T17:55:24.723312Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:158:2180], cookie=3083433590110366351, name="Sem1", limit=42) 2026-01-07T17:55:24.723455Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-07T17:55:24.735706Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:158:2180], cookie=3083433590110366351) 2026-01-07T17:55:24.736220Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2185], cookie=15062456800065212061, name="Sem1", force=0) 2026-01-07T17:55:24.736316Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2026-01-07T17:55:24.757568Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2185], cookie=15062456800065212061) 2026-01-07T17:55:24.758120Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:168:2190], cookie=7848820531617629478, name="Sem1", force=0) 2026-01-07T17:55:24.770392Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:168:2190], cookie=7848820531617629478) 2026-01-07T17:55:25.248396Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:25.248510Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: 
[72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:25.263122Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:25.263371Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:25.286668Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:25.287052Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=2086779099049548672, session=0, seqNo=0) 2026-01-07T17:55:25.287167Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:25.299355Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=2086779099049548672, session=1) 2026-01-07T17:55:25.299576Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=15010583373132395309, session=0, seqNo=0) 2026-01-07T17:55:25.299664Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:25.311306Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=15010583373132395309, session=2) 2026-01-07T17:55:25.311569Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:137:2161], cookie=3896675242854977072 2026-01-07T17:55:25.312006Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:149:2171], cookie=11145377385033436334, name="Sem1", limit=3) 2026-01-07T17:55:25.312148Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:55:25.324006Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:149:2171], cookie=11145377385033436334) 2026-01-07T17:55:25.324340Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=112, name="Sem1") 2026-01-07T17:55:25.324439Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=112) 2026-01-07T17:55:25.324713Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=113, name="Sem1") 2026-01-07T17:55:25.324808Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=113) 2026-01-07T17:55:25.325079Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=13684266525470602586, session=2, seqNo=0) 2026-01-07T17:55:25.336840Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [720575940379279 ... 
de 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:26.753866Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:26.764504Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=129, session=1, semaphore="Sem2" count=2) 2026-01-07T17:55:26.776847Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=129) 2026-01-07T17:55:26.777298Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=130, name="Sem2") 2026-01-07T17:55:26.777391Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=130) 2026-01-07T17:55:26.777662Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=131, session=1, semaphore="Sem2" count=1) 2026-01-07T17:55:26.790996Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=131) 2026-01-07T17:55:26.791531Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=132, name="Sem2") 2026-01-07T17:55:26.791635Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=132) 2026-01-07T17:55:26.791979Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=133, name="Sem2") 2026-01-07T17:55:26.792062Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=133) 2026-01-07T17:55:27.080919Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:27.081045Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:27.099416Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:27.099533Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:27.134235Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:27.141444Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=14289034329891629791, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2026-01-07T17:55:27.141674Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2026-01-07T17:55:27.153675Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=14289034329891629791) 2026-01-07T17:55:27.154153Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=6202882044207752921, path="/Root1/Res", config={ }) 2026-01-07T17:55:27.154356Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2026-01-07T17:55:27.166867Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=6202882044207752921) 2026-01-07T17:55:27.167476Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2173], cookie=727348050417003485, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2026-01-07T17:55:27.167656Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2026-01-07T17:55:27.184343Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2173], cookie=727348050417003485) 2026-01-07T17:55:27.184915Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2178], cookie=3841168511607870359, path="/Root2/Res", config={ }) 2026-01-07T17:55:27.185149Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2026-01-07T17:55:27.202663Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2178], cookie=3841168511607870359) 2026-01-07T17:55:27.203265Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:161:2183], cookie=16680910128253251868, path="/Root2/Res/Subres", config={ }) 2026-01-07T17:55:27.203499Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2026-01-07T17:55:27.215970Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:161:2183], cookie=16680910128253251868) 2026-01-07T17:55:27.217286Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 12140264199188010014. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.217352Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=12140264199188010014) 2026-01-07T17:55:27.263835Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.323067Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.357855Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.358509Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2192]. Cookie: 6581298962775853719. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-07T17:55:27.359346Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 16211982206778427456. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.359407Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=16211982206778427456) 2026-01-07T17:55:27.401097Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.443025Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.443740Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2199]. Cookie: 1320183206599209629. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-07T17:55:27.444559Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 12586815399098497628. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.444615Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=12586815399098497628) 2026-01-07T17:55:27.445270Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 6654785239910306610. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.445351Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=6654785239910306610) 2026-01-07T17:55:27.479632Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.479891Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2026-01-07T17:55:27.480693Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:190:2206]. Cookie: 3979320059537538732. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2026-01-07T17:55:24.591491Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.591679Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.610932Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.611406Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.646273Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.652653Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=11561603666570631041, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-07T17:55:24.652955Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:55:24.665153Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=11561603666570631041) 2026-01-07T17:55:24.665788Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2168], cookie=3465694519740804749, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-07T17:55:24.666032Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2026-01-07T17:55:24.678336Z node 1 :KESUS_TABLET DEBUG: 
tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2168], cookie=3465694519740804749) 2026-01-07T17:55:24.679034Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2173], cookie=9965497667390884817, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:24.679275Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2026-01-07T17:55:24.692601Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2173], cookie=9965497667390884817) 2026-01-07T17:55:24.693451Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2178], cookie=2607075903270964700, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:24.693684Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-07T17:55:24.706234Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2178], cookie=2607075903270964700) 2026-01-07T17:55:24.706897Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2183], cookie=17650357181269968230, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:24.707120Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2026-01-07T17:55:24.719575Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2183], cookie=17650357181269968230) 2026-01-07T17:55:24.720321Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2188], cookie=14963311949869607274, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:24.720556Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2026-01-07T17:55:24.734922Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2188], cookie=14963311949869607274) 2026-01-07T17:55:24.735548Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2193], cookie=7477332689203915314, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-07T17:55:24.735710Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2026-01-07T17:55:24.752811Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2193], cookie=7477332689203915314) 2026-01-07T17:55:24.753636Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:176:2198], cookie=3659338354247529801, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:24.753870Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2026-01-07T17:55:24.766193Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: 
[72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:176:2198], cookie=3659338354247529801) 2026-01-07T17:55:24.767016Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:181:2203], cookie=15311720753447881736, ids=[100], paths=[], recursive=0) 2026-01-07T17:55:24.767114Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:181:2203], cookie=15311720753447881736) 2026-01-07T17:55:24.767658Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:184:2206], cookie=420569922451090988, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-07T17:55:24.767756Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:184:2206], cookie=420569922451090988) 2026-01-07T17:55:24.768276Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:187:2209], cookie=18113653355581572366, ids=[], paths=[/Root, ], recursive=0) 2026-01-07T17:55:24.768358Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:187:2209], cookie=18113653355581572366) 2026-01-07T17:55:24.768838Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:190:2212], cookie=15589098345347397824, ids=[1, 1], paths=[], recursive=0) 2026-01-07T17:55:24.768897Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:190:2212], cookie=15589098345347397824) 2026-01-07T17:55:24.770512Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:193:2215], cookie=13213739876916677907, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2026-01-07T17:55:24.770622Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:193:2215], cookie=13213739876916677907) 2026-01-07T17:55:24.771336Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:196:2218], cookie=10132337049988287691, ids=[], paths=[], recursive=1) 2026-01-07T17:55:24.771430Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:196:2218], cookie=10132337049988287691) 2026-01-07T17:55:24.772822Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:199:2221], cookie=13387829983901024730, ids=[], paths=[], recursive=0) 2026-01-07T17:55:24.772920Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:199:2221], cookie=13387829983901024730) 2026-01-07T17:55:24.773445Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:202:2224], cookie=17403877466138145549, ids=[3, 2], paths=[], recursive=1) 2026-01-07T17:55:24.773517Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:202:2224], 
cookie=17403877466138145549) 2026-01-07T17:55:24.774037Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:205:2227], cookie=9197589862220624168, ids=[3, 2], paths=[], recursive=0) 2026-01-07T17:55:24.774092Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:205:2227], cookie=9197589862220624168) 2026-01-07T17:55:24.774627Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:208:2230], cookie=6324260897745591272, ids=[], paths=[Root2/], recursive=1) 2026-01-07T17:55:24.774708Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:208:2230], cookie=6324260897745591272) 2026-01-07T17:55:24.775291Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:211:2233], cookie=10400656694235805605, ids=[], paths=[Root2/], recursive=0) 2026-01-07T17:55:24.775348Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:211:2233], cookie=10400656694235805605) 2026-01-07T17:55:24.794835Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.794992Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.795477Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.795806Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.839505Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.839959Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=16046078211999410076, ids=[100], paths=[], recursive=0) 2026-01-07T17:55:24.840052Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=16046078211999410076) 2026-01-07T17:55:24.840946Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2268], cookie=8569123394939756258, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-07T17:55:24.841065Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2268], cookie=8569123394939756258) 2026-01-07T17:55:24.841757Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2271], cookie=4474525109696455148, ids=[], paths=[/Root, ], recursive=0) 2026-01-07T17:55:24.841862Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2271], cookie=4474525109696455148) 2026-01-07T17:55:24.842522Z ... 
DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-07T17:55:26.979829Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:156:2178], cookie=7608556746663273261) 2026-01-07T17:55:26.980393Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:161:2183], cookie=14374213243885221495, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.980491Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:161:2183], cookie=14374213243885221495) 2026-01-07T17:55:26.981372Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:167:2189], cookie=12792388389971534408, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.981449Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:167:2189], cookie=12792388389971534408) 2026-01-07T17:55:26.982237Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:173:2195], cookie=10420626441461387610, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.982309Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:173:2195], cookie=10420626441461387610) 2026-01-07T17:55:26.982750Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:176:2198], cookie=16233020622015244996, id=0, path="/Root/Folder/NonexistingRes") 2026-01-07T17:55:26.982849Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:176:2198], cookie=16233020622015244996) 2026-01-07T17:55:26.983293Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:179:2201], cookie=2502936369059241935, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.983357Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:179:2201], cookie=2502936369059241935) 2026-01-07T17:55:26.983780Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:182:2204], cookie=7311806010643890366, id=100, path="") 2026-01-07T17:55:26.983842Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:182:2204], cookie=7311806010643890366) 2026-01-07T17:55:26.984268Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:185:2207], cookie=3483590795074789581, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.984327Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:185:2207], cookie=3483590795074789581) 2026-01-07T17:55:26.984768Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:188:2210], cookie=1597879505564219195, id=3, path="") 2026-01-07T17:55:26.984824Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:188:2210], cookie=1597879505564219195) 2026-01-07T17:55:26.985266Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:191:2213], cookie=11928848287566281587, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.985324Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:191:2213], cookie=11928848287566281587) 2026-01-07T17:55:26.985820Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:194:2216], cookie=11855753898871495026, id=0, path="/Root/Folder/Q1") 2026-01-07T17:55:26.985965Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2026-01-07T17:55:26.998253Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:194:2216], cookie=11855753898871495026) 2026-01-07T17:55:26.998810Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:199:2221], cookie=15212564690513100874, ids=[], paths=[], recursive=1) 2026-01-07T17:55:26.998885Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:199:2221], cookie=15212564690513100874) 2026-01-07T17:55:27.018440Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:27.018551Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:27.019017Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:27.019929Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:27.067816Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:27.068176Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:238:2251], cookie=8811853448751093217, ids=[], paths=[], recursive=1) 2026-01-07T17:55:27.068267Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:238:2251], cookie=8811853448751093217) 2026-01-07T17:55:27.068896Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:244:2256], cookie=9227914116073322089, id=3, path="") 2026-01-07T17:55:27.069066Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2026-01-07T17:55:27.081239Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:244:2256], cookie=9227914116073322089) 2026-01-07T17:55:27.081913Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:249:2261], cookie=18328460527375248692, ids=[], paths=[], recursive=1) 2026-01-07T17:55:27.082002Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:249:2261], cookie=18328460527375248692) 
2026-01-07T17:55:27.094000Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:27.094107Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:27.094551Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:27.095114Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:27.134877Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:27.135226Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:288:2291], cookie=12908810475075764899, ids=[], paths=[], recursive=1) 2026-01-07T17:55:27.135313Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:288:2291], cookie=12908810475075764899) 2026-01-07T17:55:27.553275Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:27.553418Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:27.575468Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:27.575610Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:27.610363Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:27.610825Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=11519235743556540193, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:27.611058Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2026-01-07T17:55:27.623332Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=11519235743556540193) 2026-01-07T17:55:27.624006Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=1268443671879077070, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-07T17:55:27.624198Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2026-01-07T17:55:27.636439Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=1268443671879077070) 2026-01-07T17:55:27.638242Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 1391587915610113754. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.638318Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=1391587915610113754) 2026-01-07T17:55:27.639197Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 11544007729678258508. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2026-01-07T17:55:27.639256Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=11544007729678258508) |96.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TObjectStorageListingTest::SchemaChecks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] Test command err: Trying to start YDB, gRPC: 11006, MsgBus: 17695 2026-01-07T17:54:42.754728Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679617658033295:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:42.757401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:43.004202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:43.004320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:43.027746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:43.075824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:43.093476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:43.284906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:43.284937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:43.284944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:43.285011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:43.358602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:43.772369Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:43.784785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:43.792427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:45.864206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679630542935997:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.864659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679630542935984:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.864746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.865709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679630542936015:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.865793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:45.868239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:45.879384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679630542936013:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:54:45.996970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679630542936066:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:46.255791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.669096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:46.877346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:46.884898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:54:46.894951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 15361, MsgBus: 16146 2026-01-07T17:54:47.919133Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679637744091948:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:47.919166Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:47.935235Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:47.999395Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:48.005049Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679637744091916:2081] 1767808487916479 != 1767808487916482 2026-01-07T17:54:48.036200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:48.036275Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:48.040053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:48.107442Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:48.107469Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:48.107479Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:48.107554Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:48.150532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:48.514878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:48.926498Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:50.863410Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679650628994680:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.863493Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.863795Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679650628994693:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.863821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679650628994692:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.863842Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:50.867511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB cal ... 01-07T17:55:18.081176Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:18.083671Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:18.092473Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592679772021354346:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:55:18.173402Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679772021354399:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:18.215769Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.261188Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.763910Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.795505Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679750546517078:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:18.795568Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:18.979348Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:55:18.987353Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:55:19.326517Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592679776316322367:2939] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:19.339894Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.018929Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:55:20.024700Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 29218, MsgBus: 4310 2026-01-07T17:55:21.325008Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679786579324281:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:21.343325Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:21.445510Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:21.446408Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:21.449141Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679786579324240:2081] 1767808521306014 != 1767808521306017 2026-01-07T17:55:21.461727Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:21.461832Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:21.482758Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:21.565498Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:21.565530Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:21.565537Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:21.565626Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:21.635306Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:22.130223Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:22.360081Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:25.370657Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [5:7592679803759194310:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:25.370756Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:25.371146Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679803759194321:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:25.371186Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679803759194323:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:25.371228Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:25.376009Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:25.394285Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679803759194326:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:55:25.462890Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679803759194378:2542] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:25.560056Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:25.745749Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:55:25.756952Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-07T17:55:25.768795Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPartitionTests::WriteSubDomainOutOfSpace >> Channels20::IcChannelAsyncRead [GOOD] >> Channels20::IcChannelEarlyFinish >> TKesusTest::TestAttachNewSessions >> KqpPg::CreateTableBulkUpsertAndRead >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] Test command err: 2026-01-07T17:55:13.408303Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.474722Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:13.474809Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:13.474864Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:13.474914Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:13.490041Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [1:183:2196] 2026-01-07T17:55:13.493080Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:55:13.000000Z 2026-01-07T17:55:13.493173Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [1:183:2196] 2026-01-07T17:55:13.514281Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.555697Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.576736Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.587416Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.628955Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.670445Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.705255Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.829128Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|415c0fe7-d347453e-893acfa7-147ba04c_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-07T17:55:13.854438Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:13.876558Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-07T17:55:14.100913Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.132380Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-07T17:55:14.383692Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-07T17:55:14.678914Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:14.710644Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 4 2026-01-07T17:55:15.045670Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2026-01-07T17:55:15.327085Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 
2026-01-07T17:55:15.592740Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:15.815801Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-07T17:55:15.920643Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-07T17:55:16.199326Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-07T17:55:16.572809Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.070316Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.114943Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:17.115000Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:17.115042Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:17.115115Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:17.130153Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [2:182:2194] 2026-01-07T17:55:17.131676Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-07T17:55:17.000000Z 2026-01-07T17:55:17.131733Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:182:2194] 2026-01-07T17:55:17.145147Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.176081Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.199821Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.233976Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.270535Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.302020Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.379234Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.413586Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.497145Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|67f21c51-281169d7-a456baba-d368ebe3_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-07T17:55:17.566222Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.680327Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-07T17:55:17.822079Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:17.966203Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-07T17:55:18.237272Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-07T17:55:18.365185Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:18.579992Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 4 Send write: 5 2026-01-07T17:55:18.889137Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 
2026-01-07T17:55:19.150230Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:19.415393Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-07T17:55:19.471251Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-07T17:55:19.706254Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-07T17:55:20.080142Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:20.469193Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2 ... [StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.243626Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.264290Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.264387Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.264463Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.264524Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.264585Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.264619Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.285310Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.285391Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.285437Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.285470Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.285511Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.285555Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.307524Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.307624Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.307703Z 
node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.307764Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.307804Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.307850Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.328552Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:28.339949Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.340034Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.340124Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.340203Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.340246Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.340292Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.361568Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.361676Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.361748Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.361804Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.361861Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.361915Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.385395Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.385486Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.385570Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.385630Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.385691Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.385725Z node 5 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.409312Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.409398Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.409449Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.409483Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.410339Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.410401Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.445401Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.445494Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.445550Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.445576Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.445617Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.445658Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.445801Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-07T17:55:28.445854Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.445899Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.445949Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.445992Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.446036Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-07T17:55:28.446092Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-07T17:55:28.446161Z node 5 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2026-01-07T17:55:28.446209Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 
2026-01-07T17:55:28.446254Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:28.446294Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:28.446329Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-07T17:55:28.446662Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:28.467639Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:55:28.467732Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:55:28.467878Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-07T17:55:28.467947Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:28.468008Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-07T17:55:28.468121Z node 5 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 2 affect SourceId sourceid 2026-01-07T17:55:28.468217Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.468259Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:28.468316Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:28.468374Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:28.468444Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2026-01-07T17:54:59.051749Z node 
1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679692438752718:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:59.052440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:59.305931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:59.306049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:59.334084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:54:59.349279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:59.350713Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679692438752689:2081] 1767808499049729 != 1767808499049732 2026-01-07T17:54:59.351682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:59.433766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:59.433792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:59.433806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:59.433955Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:59.544850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:54:59.611888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:59.698257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:00.061693Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:04.054727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679692438752718:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:04.055460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:14.293684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:55:14.293707Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:25.111593Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679802008448536:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:25.111991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:25.186543Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T17:55:25.196634Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:25.239490Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:25.239581Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:25.247737Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:25.285443Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:25.285468Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:25.285477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:25.285562Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:25.357635Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T17:55:25.482121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:25.507901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:26.121482Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> KqpPg::TypeCoercionInsert-useSink >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] >> 
CrossShardUniqIndexValidationTest::Validation [GOOD] >> FulltextIndexBuildTest::Basic >> IndexBuildTest::ShadowDataNotAllowedByDefault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2026-01-07T17:55:29.131058Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:29.131194Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:29.150655Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:29.151149Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:29.186882Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:29.187479Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=14860218324758001944, session=0, seqNo=0) 2026-01-07T17:55:29.187654Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:29.199986Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=14860218324758001944, session=1) 2026-01-07T17:55:29.200301Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4294861833244747918, session=0, seqNo=0) 2026-01-07T17:55:29.200427Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:29.212404Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4294861833244747918, session=2) 2026-01-07T17:55:29.539578Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:29.539686Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:29.558102Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:29.558351Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:29.574051Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:29.574648Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=15724213934884789450, session=1, seqNo=0) 2026-01-07T17:55:29.597896Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=15724213934884789450, session=1) 2026-01-07T17:55:29.936377Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:29.936491Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:29.954823Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:29.954958Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:29.990480Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:29.991369Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] 
TTxSessionAttach::Execute (sender=[3:136:2161], cookie=3086412391380168135, session=0, seqNo=0) 2026-01-07T17:55:29.991522Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:30.003799Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=3086412391380168135, session=1) 2026-01-07T17:55:30.360296Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:30.360423Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:30.380318Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:30.380654Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:30.405912Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:30.406315Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:137:2161], cookie=6193592728069098236, path="") 2026-01-07T17:55:30.419115Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:137:2161], cookie=6193592728069098236, status=SUCCESS) 2026-01-07T17:55:30.420164Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:146:2168], cookie=17600869031795524921, session=0, seqNo=0) 2026-01-07T17:55:30.420309Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:30.432903Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:146:2168], cookie=17600869031795524921, session=1) 2026-01-07T17:55:30.433662Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:147:2169], cookie=111, session=0, seqNo=0) 2026-01-07T17:55:30.433788Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:30.433921Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:147:2169], cookie=222, seqNo=0 2026-01-07T17:55:30.446247Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:147:2169], cookie=111, session=2) 2026-01-07T17:55:30.818409Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:30.818536Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:30.839563Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:30.839711Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:30.878293Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:30.878735Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:136:2161], cookie=11463494582296827103, path="") 2026-01-07T17:55:30.890900Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:136:2161], cookie=11463494582296827103, status=SUCCESS) 2026-01-07T17:55:30.891980Z node 5 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:146:2168], cookie=10257200935532587828, session=0, seqNo=0) 2026-01-07T17:55:30.892122Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:30.904412Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:146:2168], cookie=10257200935532587828, session=1) 2026-01-07T17:55:30.905251Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:146:2168], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:30.905426Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:30.905517Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:30.905903Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=111, session=0, seqNo=0) 2026-01-07T17:55:30.906002Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:30.906132Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=222, session=1, seqNo=0) 2026-01-07T17:55:30.926594Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:146:2168], cookie=123) 2026-01-07T17:55:30.926698Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=111, session=2) 2026-01-07T17:55:30.926756Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=222, session=1) |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.3%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 1495, MsgBus: 23518 2026-01-07T17:54:10.106274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679481218968748:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.106412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.414559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.450345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.450467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.566061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.572867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.670594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.941088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.941127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.941133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.941201Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.116179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:12.022444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:12.097244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.910793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494103871910:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.910880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494103871901:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.911162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.911478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679494103871917:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.911521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.915151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:13.926342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679494103871915:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:54:14.019750Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679498398839264:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:14.724329Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2026-01-07T17:54:14.724386Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2026-01-07T17:54:14.744113Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7592679498398839298:2355], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsgk15etwcqtaksjpr1014. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmNjZThkZmUtZWI2OGE5Mi04NDI4NzcxZi1jNzNiZTA5Nw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-07T17:54:14.612832Z }, code: 2029 }. 2026-01-07T17:54:14.789099Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037895 Cancelled read: {[1:7592679498398839299:2355], 7} 2026-01-07T17:54:14.789157Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037888 Cancelled read: {[1:7592679498398839299:2355], 0} 2026-01-07T17:54:14.789186Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[1:7592679498398839299:2355], 1} 2026-01-07T17:54:14.789211Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[1:7592679498398839299:2355], 2} 2026-01-07T17:54:14.789231Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[1:7592679498398839299:2355], 3} 2026-01-07T17:54:14.789260Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[1:7592679498398839299:2355], 4} 2026-01-07T17:54:14.789282Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037893 Cancelled read: {[1:7592679498398839299:2355], 5} 2026-01-07T17:54:14.789304Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037894 Cancelled read: {[1:7592679498398839299:2355], 6} 2026-01-07T17:54:14.822972Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmNjZThkZmUtZWI2OGE5Mi04NDI4NzcxZi1jNzNiZTA5Nw==, ActorId: [1:7592679494103871899:2355], ActorState: ExecuteState, LegacyTraceId: 01kecsgk15etwcqtaksjpr1014, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-07T17:54:14.612832Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-z2c6jslqle, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-07T17:54:14.612832Z } , code: 2029 query_phases { duration_us: 217577 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 45217 affected_shards: 8 } compilation { duration_us: 558748 cpu_time_us: 548389 } process_cpu_time_us: 6180 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048586,\"Max\":1048586,\"Min\":1048586,\"History\":[59,1048586]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1767808454744,\"CpuTimeUs\":{\"Count\":1,\"Sum\":45217,\"Max\":45217,\"Min\":45217,\"History\":[58,45217]},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":4483,\"Max\":4483,\"Min\":4483,\"History\":[59,4483]}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":558748,\"CpuTimeUs\":548389},\"ProcessCpuTimeUs\":6180,\"TotalDurationUs\":913548,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":111148},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanN ... 
de 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:55:16.807095Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:16.807807Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:16.821241Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:16.821332Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:16.855999Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:16.933666Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:16.933693Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:16.933700Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:16.933809Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:17.069370Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:17.470061Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:17.484274Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.557765Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.706207Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:17.770288Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.853285Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.427832Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679782035389079:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.427926Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.428198Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679782035389089:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.428242Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.495265Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.535930Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.569833Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.602882Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.634303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.672957Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.815264Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.864814Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.956460Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679782035389959:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.956560Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.956838Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679782035389965:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.956979Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679782035389964:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.957075Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.960723Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:20.970915Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679782035389968:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:21.054431Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679786330357315:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:21.666833Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679764855518055:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:21.667583Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:22.783259Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:26.168741Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=YzM2NzRiOTMtOWYwYjYzNGUtODhkNWIxZTctZWUzMzBmMg==, ActorId: [5:7592679790625324916:2532], ActorState: ExecuteState, LegacyTraceId: 01kecsjscb7tamdpkb7qvnbtr7, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202524 > 50331648)" issue_code: 2013 severity: 1 } trace_id# |96.4%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2026-01-07T17:54:58.559256Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.624076Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:58.624160Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:58.624248Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.624320Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:54:58.644591Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:183:2196] 2026-01-07T17:54:58.645593Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:183:2196] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.672724Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.704744Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.715605Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.737844Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.738485Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: 
"e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.749224Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.770420Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.793057Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.806776Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.828119Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.838920Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.860658Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: 
"\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.873294Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:58.897406Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.921303Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:58.944083Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:59.009414Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:59.030610Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 
2026-01-07T17:54:59.051853Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-07T17:54:59.062538Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.083810Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Ra ... 937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:29.768220Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:29.768258Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:29.768290Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:55:29.768335Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:55:29.768481Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:55:29.778863Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:29.810414Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:29.841812Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:29.862891Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:29.918096Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:29.990407Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.074732Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.106128Z node 4 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 0 2026-01-07T17:55:30.106217Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:55:30.106266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:30.106311Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:30.106361Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:30.106410Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:55:30.127169Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2026-01-07T17:55:30.127324Z node 4 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2026-01-07T17:55:30.127402Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:55:30.127453Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:30.127514Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:30.127555Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:30.127609Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:30.127775Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2026-01-07T17:55:30.128734Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2026-01-07T17:55:30.128826Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:30.128870Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-07T17:55:30.128919Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:55:30.129508Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 101 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000? 
size 105 WTime 1329 2026-01-07T17:55:30.129936Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:55:30.150656Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-07T17:55:30.150758Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-07T17:55:30.150903Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:55:30.150977Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. Partition: 1 2026-01-07T17:55:30.151070Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2026-01-07T17:55:30.151318Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-07T17:55:30.151369Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:30.151417Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:30.151463Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:30.151508Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-07T17:55:30.151571Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-07T17:55:30.470244Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.512452Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:30.512519Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:30.512563Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:30.512608Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:55:30.529637Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] bootstrapping {1, {2, 3}, 4} [5:183:2196] 2026-01-07T17:55:30.530469Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [5:183:2196] 2026-01-07T17:55:30.551542Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.593999Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 
local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.614936Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.625785Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.670220Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.712273Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.743229Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.892575Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:30.924509Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:31.144646Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:31.168608Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:31.362733Z node 5 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][{1, {2, 3}, 4}][StateIdle] Got error: The transaction is completed Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> Channels20::IcChannelTrivial [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> VectorIndexBuildTest::CreateAndDrop >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2026-01-07T17:55:19.731994Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:19.732103Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:19.746711Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:19.747125Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 
2026-01-07T17:55:19.781523Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:19.788416Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=5558243797653603447, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2026-01-07T17:55:19.788590Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=5558243797653603447) 2026-01-07T17:55:19.789136Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2166], cookie=9028116077016050012, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2026-01-07T17:55:19.789239Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2166], cookie=9028116077016050012) 2026-01-07T17:55:19.789624Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=14918895639717203850, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2026-01-07T17:55:19.789814Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2026-01-07T17:55:19.801417Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=14918895639717203850) 2026-01-07T17:55:19.801871Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:152:2174], cookie=17389791240042392098, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2026-01-07T17:55:19.802075Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2026-01-07T17:55:19.814741Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2174], cookie=17389791240042392098) 2026-01-07T17:55:20.145711Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:20.145815Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:20.162658Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:20.162911Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:20.177061Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:20.177473Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:137:2161], cookie=11168082696269145222, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-07T17:55:20.177790Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:55:20.201297Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:137:2161], cookie=11168082696269145222) 2026-01-07T17:55:20.201815Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:147:2169], 
cookie=15519253517591673325, path="/Root/Res", config={ }) 2026-01-07T17:55:20.202029Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-07T17:55:20.220329Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:147:2169], cookie=15519253517591673325) 2026-01-07T17:55:20.222052Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:152:2174]. Cookie: 9674813975856598295. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:20.222120Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:152:2174], cookie=9674813975856598295) 2026-01-07T17:55:20.222567Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:152:2174]. Cookie: 15745279168482643463. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2026-01-07T17:55:20.222612Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:152:2174], cookie=15745279168482643463) 2026-01-07T17:55:22.390147Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:22.390231Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:22.403288Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:22.403382Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:22.445958Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:22.446415Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=3913934816028955988, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:55:22.446713Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:55:22.461044Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=3913934816028955988) 2026-01-07T17:55:22.461595Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=10232564301094207623, path="/Root/Res", config={ }) 2026-01-07T17:55:22.461802Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-07T17:55:22.473868Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=10232564301094207623) 
2026-01-07T17:55:22.474606Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2174]. Cookie: 1774318572037721724. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:22.474656Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2174], cookie=1774318572037721724) 2026-01-07T17:55:22.475081Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2174]. Cookie: 9270357985190220377. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2026-01-07T17:55:22.475123Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:152:2174], cookie=9270357985190220377) 2026-01-07T17:55:24.601016Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.601135Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.622106Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.622486Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.646657Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.647122Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=2668699936768481659, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:55:24.647463Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:55:24.659692Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=2668699936768481659) 2026-01-07T17:55:24.660642Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 13546626750455390431. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:24.660720Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=13546626750455390431) 2026-01-07T17:55:24.661287Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 2083461576398998400. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-07T17:55:24.661346Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=2083461576398998400) 2026-01-07T17:55:24.661823Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 10014341019708075202. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-07T17:55:24.661876Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=10014341019708075202) 2026-01-07T17:55:26.822868Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:26.822970Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:26.841305Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:26.841449Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:26.876630Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:26.877057Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=4392030506587390848, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-07T17:55:26.877345Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-07T17:55:26.890590Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=4392030506587390848) 2026-01-07T17:55:26.891395Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2169]. Cookie: 13982742597130630755. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:26.891448Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2169], cookie=13982742597130630755) 2026-01-07T17:55:26.891896Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:147:2169]. Cookie: 8353934136981340630. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2026-01-07T17:55:26.891939Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:147:2169], cookie=8353934136981340630) 2026-01-07T17:55:29.409617Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:186:2193]. Cookie: 17225749266090724797. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:29.409700Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:186:2193], cookie=17225749266090724797) 2026-01-07T17:55:29.410255Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:186:2193]. Cookie: 2281370290230903543. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2026-01-07T17:55:29.410306Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:186:2193], cookie=2281370290230903543) 2026-01-07T17:55:31.551043Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:219:2219]. Cookie: 9331615095869992842. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-07T17:55:31.551106Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:219:2219], cookie=9331615095869992842) 2026-01-07T17:55:31.551580Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:219:2219]. Cookie: 16803651026551658461. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2026-01-07T17:55:31.551622Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:219:2219], cookie=16803651026551658461) |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] >> VectorIndexBuildTest::TTxReply_DoExecute_Throws >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> FulltextIndexBuildTest::Basic [GOOD] >> FulltextIndexBuildTest::FlatRelevance >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::WithFollowers >> VectorIndexBuildTest::RecreatedColumns >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> Channels20::IcChannelEarlyFinish [GOOD] >> Channels20::IcChannelBackPressure >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> FulltextIndexBuildTest::FlatRelevance [GOOD] >> IndexBuildTest::BaseCase >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] Test command err: Trying to start YDB, gRPC: 63935, MsgBus: 13237 2026-01-07T17:55:11.586471Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679744346134053:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:11.587761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:11.846867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:11.980151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:11.980257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:11.987543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:12.050699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7592679744346133999:2081] 1767808511573813 != 1767808511573816 2026-01-07T17:55:12.055601Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:12.070383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:12.429684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:12.429792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:12.429801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:12.429891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:12.588515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:13.205820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:13.212195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:13.283600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.457781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.590399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:13.665016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:14.698541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679757231037765:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.698660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.699260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679757231037774:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:14.699313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:15.588788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.625319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.660111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.690237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.732919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.763564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.835946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.892595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.999965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679761526005949:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:16.000086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:16.000231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679761526005954:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:16.000613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679765820973251:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:16.000672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:16.018290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:16.031174Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679765820973254:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:16.102540Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679765820973305:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:16.583163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679744346134053:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:16.583246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15424, MsgBus: 9996 2026-01-07T17:55:19.192719Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActo ... 5.243477Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:25.243566Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:25.244661Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:25.244732Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:25.250677Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:25.250821Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:55:25.266369Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:25.341721Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:25.341744Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:25.341751Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:25.341859Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:25.394276Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:25.407140Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:25.823741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:25.854148Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:25.959273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:26.061028Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:26.068216Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:26.108837Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:26.143874Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.623712Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679814367467226:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:28.623819Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:28.624359Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679814367467236:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:28.624409Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:28.692730Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.745045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.796236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.847841Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.910314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:28.962214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:29.013027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:29.077839Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:29.162436Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679818662435689:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:29.162520Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:29.162570Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679818662435694:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:29.162720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592679818662435696:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:29.162777Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:29.166244Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:29.187538Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592679818662435697:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:29.274731Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592679818662435778:4915] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:30.052588Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592679801482562901:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:30.052658Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:30.081031Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592679804564157345:2102];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:30.081115Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> VectorIndexBuildTest::TTxReply_DoExecute_Throws [GOOD] >> VectorIndexBuildTest::TTxProgress_Throws |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::WithFollowersUniq >> IndexBuildTest::CancellationNotEnoughRetries >> DqUnboxedValueDoNotFitToArrow::LargeVariant [GOOD] >> DqUnboxedValueToNativeArrowConversion::ListOfJsons [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> IndexBuildTest::WithFollowersUniq [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq >> VectorIndexBuildTest::TTxProgress_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Throws |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> VectorIndexBuildTest::CreateAndDrop [GOOD] >> VectorIndexBuildTest::Metering_CommonDB >> VectorIndexBuildTest::TTxInit_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> IndexBuildTest::RejectsOnDuplicatesUniq [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard >> VectorIndexBuildTest::RecreatedColumns [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex [GOOD] >> VectorIndexBuildTest::UnknownState |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpLimits::ReplySizeExceeded [GOOD] >> Channels20::IcChannelBackPressure [GOOD] >> Channels20::CaIntegrationTrivial >> KqpBatchDelete::ManyPartitions_2 [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 7811, MsgBus: 17926 2026-01-07T17:54:51.940557Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679657748123083:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:51.940602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:52.161144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:52.188049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:52.188160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:52.232450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:52.235002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:52.235277Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679657748123054:2081] 1767808491938743 != 1767808491938746 2026-01-07T17:54:52.323505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:52.323526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:52.323530Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:52.323603Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:52.363477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:52.698741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:52.706898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:52.779246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:52.947194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:56.940726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679657748123083:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:56.940805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:00.772474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679696402830517:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:00.772484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679696402830525:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:00.772580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:00.772892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679696402830532:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:00.772974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:00.776656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:00.787365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679696402830531:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:55:00.873970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679696402830584:3168] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:01.192091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:07.127792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:55:07.127818Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 28884, MsgBus: 20092 2026-01-07T17:55:09.108303Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679734390055624:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:09.108367Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:09.124090Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:09.203995Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:09.207826Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679734390055597:2081] 1767808509107215 != 1767808509107218 2026-01-07T17:55:09.247112Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:09.247208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:09.250832Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:09.301586Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:09.301615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:09.301621Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:09.301722Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:09.325118Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:09.726715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:09.736978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:10.115515Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:14.108595Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7592679734390055624:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:14.108674Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:17.780897Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679768749795756:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.780914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679768749795748:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.781085Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.781348Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679768749795763:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions ... heme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:35.313024Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7592679845390838532:2081] 1767808535191128 != 1767808535191131 2026-01-07T17:55:35.326347Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:35.329413Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:35.329514Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:35.352450Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:35.421802Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:35.443118Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:35.443162Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:35.443173Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:35.443279Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:35.933656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:35.941175Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:35.949906Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:36.015715Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:36.191219Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:36.198384Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T17:55:36.263670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.333257Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679862570709596:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.333341Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.333661Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679862570709605:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.333698Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.415661Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.462640Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.505241Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.541845Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.581175Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.626404Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.674183Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.733440Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.841197Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679862570710476:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.841342Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.841804Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679862570710481:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.841861Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679862570710482:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.841971Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.847497Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:39.861760Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679862570710485:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:39.960212Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679862570710536:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:40.191934Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679845390838556:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:40.192000Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:41.712081Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:45.022412Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=M2IwMjE4MjYtMjRjNDM5YWEtMTdmZGZhNWQtNGE3ZjAzNWQ=, ActorId: [5:7592679871160645449:2533], ActorState: ExecuteState, LegacyTraceId: 01kecskbwz9jd36fwmt6k9w1cq, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202524 > 50331648)" issue_code: 2013 severity: 1 } trace_id# |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> VectorIndexBuildTest::UnknownState [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::UnknownState [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:34.859949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:34.860024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:34.860063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:34.860101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:34.860131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:34.860160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:34.860221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:34.860305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:34.861201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:34.861489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:34.938240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:34.938313Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:34.952970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:34.953318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:34.953495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:34.960076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:34.960475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:34.961318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:34.961609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:34.966041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:34.966251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:34.967600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:34.967691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:34.967840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:34.967917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:34.968020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:34.968230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-07T17:55:35.109463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.110989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:35.111703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 18] was 0 2026-01-07T17:55:47.329187Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-07T17:55:47.329214Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-07T17:55:47.329229Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-07T17:55:47.329243Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-07T17:55:47.329256Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-07T17:55:47.329269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-07T17:55:47.329294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-07T17:55:47.329316Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-07T17:55:47.329334Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-07T17:55:47.329349Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-07T17:55:47.329361Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-07T17:55:47.329374Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-07T17:55:47.329388Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-07T17:55:47.329403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-07T17:55:47.329421Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-07T17:55:47.329437Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-07T17:55:47.329452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-07T17:55:47.329464Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-07T17:55:47.329488Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-07T17:55:47.329935Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2093: TTxInit for Columns, read records: 10, at schemeshard: 72057594046678944 2026-01-07T17:55:47.330074Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.330168Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:55:47.330208Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-07T17:55:47.330236Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:55:47.330249Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:55:47.330262Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-07T17:55:47.330353Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 4, at schemeshard: 72057594046678944 2026-01-07T17:55:47.330561Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.330728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 12, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331062Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331109Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:55:47.331248Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331582Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331648Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331849Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.331957Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.332011Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.332090Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.332242Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.332321Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-07T17:55:47.332661Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333032Z node 5 :BUILD_INDEX DEBUG: schemeshard_index_build_info.h:626: Restored index build id# 102: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, State: Filling, SubState: None, IsBroken: 1, IsCancellationRequested: 0, Issue: Unknown build kind: 999999, SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:55:47.333130Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333259Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333323Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333541Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 1 tables: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333630Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.333703Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:47.338043Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:47.342148Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:47.342208Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:47.343023Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:47.343083Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:47.343132Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:47.343788Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:1269:3169] 
sender: [5:1331:2058] recipient: [5:15:2062] 2026-01-07T17:55:47.403250Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-07T17:55:47.403535Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } 2026-01-07T17:55:47.404680Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=102&Page=BuildIndexInfo 2026-01-07T17:55:47.404825Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=102&Page=BuildIndexInfo |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 32105, MsgBus: 27505 2026-01-07T17:53:26.997043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679289576759791:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.325171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.346927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.347042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.392761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.442207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.613381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.743072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.743103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.743113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.743183Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.012355Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.433158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.452522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.512561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.673991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.825429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.902464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.311828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306756630807:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.311927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.312249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679306756630817:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.312295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.810838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.841351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.871932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.895839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.932247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.966782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.021660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.144999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311051598982:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311051598987:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679311051598989:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.145321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.148747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.159623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679311051598991:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:31.261644Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679311051599042:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.998251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679289576759791:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.998320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.945862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 7Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592679837024252447:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:33.415686Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:33.535980Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:33.578287Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:33.578413Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:33.583035Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:33.591411Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7592679837024252422:2081] 1767808533413921 != 1767808533413924 2026-01-07T17:55:33.630405Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:33.716590Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:33.716617Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:33.716630Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:33.716737Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:33.811201Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:34.403482Z node 12 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:34.420197Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:34.427811Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:34.516420Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:34.778666Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:34.881660Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:38.417130Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592679837024252447:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:38.417216Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:39.291186Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679862794058072:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.291337Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.291805Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679862794058082:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.291869Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:39.399155Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.446555Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.497749Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.545336Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.607571Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.746633Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.820704Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:39.911820Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:40.043937Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679867089026274:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:40.044022Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679867089026279:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:40.044056Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:40.044257Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679867089026281:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:40.044308Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:40.049588Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:40.064227Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592679867089026282:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:40.145300Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592679867089026344:3782] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:43.060375Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpLimits::QueryExecTimeout [GOOD] >> VectorIndexBuildTest::Metering_CommonDB [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> KqpCompileFallback::FallbackWithPreparedQuery |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> KqpCompileFallback::FallbackToVersion1Success >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 13096, MsgBus: 27530 2026-01-07T17:54:10.092228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679480197219813:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.092259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.479626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.511819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.511933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.523728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.620669Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.669733Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945600Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.108039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.880819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.894030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.983252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.215565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.420272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.500207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.740762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493082123528:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.740888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.741230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493082123538:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.741298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.184388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.215507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.274705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.304003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.332777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.367221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.397737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.440234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.510093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497377091704:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497377091709:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497377091711:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.510470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.513469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.523832Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497377091712:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.624266Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497377091764:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.092329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679480197219813:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.092380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.263975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... xists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7592679763968230131:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:16.383011Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:16.404465Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:16.496537Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:16.535070Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:16.535180Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:16.548324Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:16.595548Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:16.596100Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:16.596121Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:16.596129Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:16.596219Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:17.115522Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:17.130436Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.228449Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.387851Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:17.428806Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.521587Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.592278Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679781148101064:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.592389Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.592696Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679781148101073:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.592762Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:20.673491Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.709209Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.742567Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.811889Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.846825Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.886403Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.928489Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:20.986268Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:21.081906Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679785443069238:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.082031Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.082171Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679785443069243:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.082539Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592679785443069245:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.082627Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:21.086841Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:21.101590Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592679785443069246:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:21.159403Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592679785443069298:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:21.382592Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592679763968230131:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:21.382655Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:31.467172Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:55:31.467198Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=NzNmODhjZmYtYzc0NjFjZTktNjgxMzVjMjUtMWI1Yjg3ZDE= 2026-01-07T17:55:36.377154Z node 5 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=NzNmODhjZmYtYzc0NjFjZTktNjgxMzVjMjUtMWI1Yjg3ZDE= } |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::BaseCaseUniq |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:32.392122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:32.392223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:32.392268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:32.392312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:32.392348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:32.392388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:32.392455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:32.395309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:32.396226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:32.400181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:32.521709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-07T17:55:32.521771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:32.536447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:32.536764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:32.537015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:32.545869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:32.548989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:32.569794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:32.617105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:32.667391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:32.672789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:32.710982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:32.711056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:32.711158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:32.711239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:32.711344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:32.711498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:32.893433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.894969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.895463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1665:3536], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10356, Billed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10356} 2026-01-07T17:55:50.310399Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-07T17:55:50.310551Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-07T17:55:50.310597Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:2322:4099] TestWaitNotification: OK eventTxId 107 2026-01-07T17:55:50.311313Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-07T17:55:50.311627Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 330us result status StatusSuccess 2026-01-07T17:55:50.312223Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "prefix" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 20 levels: 2 overlap_clusters: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:55:50.313019Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-01-07T17:55:50.313412Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" took 403us result status StatusSuccess 2026-01-07T17:55:50.314319Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 35 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-07T17:55:50.320639Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830210, Sender [6:3422:5146], Recipient [6:1739:3603]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "(\n (let range \'(\'(\'__ydb_parent (Null) (Void)) \'(\'key (Null) (Void)) ))\n (let columns \'(\'__ydb_parent))\n (let result (SelectRange \'__user__indexImplPostingTable range columns \'()))\n (return (AsList (SetResult \'Result result) ))\n )" } } ... posting table contains 10 rows >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] >> KqpCompileFallback::FallbackWithScanQuery |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> Channels20::CaIntegrationTrivial [GOOD] >> Channels20::CaIntegrationAgg >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestCompactifiedWithRetention |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> KqpCompileFallback::FallbackToVersion1Success [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TPQTest::TestCompactifiedWithRetention [GOOD] >> TPQTest::TestChangeConfig >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] Test command err: Trying to start YDB, gRPC: 28266, MsgBus: 29112 2026-01-07T17:55:49.737810Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679904094969672:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:49.737870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:50.103130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:50.143540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:50.143685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:50.161290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:50.285104Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679904094969631:2081] 1767808549733233 != 1767808549733236 2026-01-07T17:55:50.302146Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:50.316976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:50.619832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:50.619945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:50.619953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:50.620044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:50.746053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:51.486638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:51.494902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:51.577962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:51.775830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:51.948763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:52.029056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:52.957008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679916979873392:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:52.957138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:52.957448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679916979873401:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:52.957520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.715480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.750993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.784747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.813781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.873333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.902650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.935391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.974132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.077523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679925569808868:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.077637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.077912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679925569808874:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.077949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679925569808873:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.077963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.086366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:54.099459Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679925569808877:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:54.206826Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679925569808928:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:54.738919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679904094969672:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:54.739015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:56.029705Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.029862Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007BFCCB19C1F8 2026-01-07T17:55:56.029901Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592679934159743819:2534], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", keepInCache: 1, split: 0{ TraceId: 01kecskpzt8kh6nhfjajar467c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdlNTczZDAtN2Y2NzFlOTEtZTk3Yzk0YzgtM2FhNGFmYWU=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:55:56.030052Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.030113Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592679934159743819:2534], queueSize: 1 2026-01-07T17:55:56.030641Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n 2026-01-07T17:55:56.030690Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592679934159743819:2534], compileActor: [1:7592679934159743827:2539] 2026-01-07T17:55:56.030719Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:55:56.030778Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592679934159743827:2539], cluster: db, database: /Root, text: "\n 
SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", startTime: 2026-01-07T17:55:56.030697Z 2026-01-07T17:55:56.077352Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7592679934159743827:2539], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n " 2026-01-07T17:55:56.380617Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592679934159743827:2539]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808556","query_text":"\\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"9f37623-dcd0aac0-7bead048-8e1470c8","version":"1.0"} 2026-01-07T17:55:56.380972Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592679934159743827:2539], duration: 0.350257s 2026-01-07T17:55:56.381005Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592679934159743827:2539], owner: [1:7592679916979873350:2383], status: SUCCESS, issues: , uid: 9f37623-dcd0aac0-7bead048-8e1470c8 2026-01-07T17:55:56.381392Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592679934159743819:2534], status: SUCCESS, compileActor: [1:7592679934159743827:2539] 2026-01-07T17:55:56.381623Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, 
QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.381874Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592679934159743819:2534], queryUid: 9f37623-dcd0aac0-7bead048-8e1470c8, status:SUCCESS 2026-01-07T17:55:56.392826Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1223: Served query from cache by uid, sender: [1:7592679934159743819:2534], queryUid: 9f37623-dcd0aac0-7bead048-8e1470c8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] Test command err: Trying to start YDB, gRPC: 9247, MsgBus: 3612 2026-01-07T17:55:50.930371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679911181342830:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:50.930588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:51.156531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:51.165122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:51.165252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:51.167450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:51.242174Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679911181342723:2081] 1767808550919766 != 1767808550919769 2026-01-07T17:55:51.251003Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:51.317666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:51.317696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:51.317703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:51.317806Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:51.332610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:51.735769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:51.795654Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:51.937075Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:52.011725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:52.167266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:52.234756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.804820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679924066246487:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.804922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.805254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679924066246497:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.805302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.135083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.169241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.199995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.234104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.259396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.289903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.340275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.377289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.434281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928361214664:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.434356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.434420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928361214669:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.434609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928361214671:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.434640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.437516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:54.446072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679928361214673:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:55:54.512048Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679928361214724:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:55.931582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679911181342830:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:55.931813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:56.026417Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.029912Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C39C87F3048 2026-01-07T17:55:56.029972Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592679936951149611:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kecskpzs8z9c7wsjv1smd178, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODgzMjIyNDgtNTY4MjM2M2EtMmI0NjlhOTktNGJlOGZhNjk=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:55:56.030129Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.030190Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592679936951149611:2531], queueSize: 1 2026-01-07T17:55:56.030634Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592679936951149611:2531], compileActor: [1:7592679936951149619:2536] 2026-01-07T17:55:56.030673Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:55:56.030725Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592679936951149619:2536], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-07T17:55:56.030648Z 2026-01-07T17:55:56.249692Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7592679936951149619:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808556","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"86691836-210e6ef6-fc6408cf-75bff302","version":"1.0"} 2026-01-07T17:55:56.250087Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592679936951149619:2536], duration: 0.219422s 2026-01-07T17:55:56.250129Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592679936951149619:2536], owner: [1:7592679924066246446:2382], status: SUCCESS, issues: , uid: 86691836-210e6ef6-fc6408cf-75bff302 2026-01-07T17:55:56.250233Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592679936951149611:2531], status: SUCCESS, compileActor: [1:7592679936951149619:2536] 2026-01-07T17:55:56.250289Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592679936951149611:2531], queryUid: 86691836-210e6ef6-fc6408cf-75bff302, status:SUCCESS |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] Test command err: Trying to start YDB, gRPC: 62545, MsgBus: 21539 2026-01-07T17:55:50.270147Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679911287214162:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:50.270944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:50.533005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:50.544443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:50.544563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:50.591171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:50.662434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679911287214042:2081] 1767808550261755 != 1767808550261758 2026-01-07T17:55:50.666928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:50.758123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:50.777651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:50.777691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:50.777698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:50.777789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:51.278814Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:51.493803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:51.510852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:55:51.588084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:51.769160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:51.953198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:52.032653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.345550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679924172117808:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.345707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.346100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679924172117818:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.346177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:53.711965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.746719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.783224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.814327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.847984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.883433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.914135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:53.973603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.077759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928467085984:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.077852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.078165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928467085990:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.078165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679928467085989:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.078210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:54.086357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:54.096060Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679928467085993:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:54.182490Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679928467086044:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:55.268421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679911287214162:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:55.268541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:56.026426Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.029891Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CA5A8F044D8 2026-01-07T17:55:56.029957Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592679937057020931:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kecskpzs37qr08xqgxy7pcx7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzdlYzM2ZWEtMjEzNjc3ZGQtMTlkMzVlY2EtNDE0YTYzZTE=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:55:56.030109Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:56.030153Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592679937057020931:2531], queueSize: 1 2026-01-07T17:55:56.030643Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n 2026-01-07T17:55:56.030683Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592679937057020931:2531], compileActor: [1:7592679937057020939:2536] 2026-01-07T17:55:56.030724Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:55:56.030771Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7592679937057020939:2536], cluster: db, database: /Root, text: "\n 
SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-07T17:55:56.030701Z 2026-01-07T17:55:56.249729Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592679937057020939:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808556","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"61625139-d7e804de-e6129e62-648951e1","version":"1.0"} 2026-01-07T17:55:56.250145Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592679937057020939:2536], duration: 0.219418s 2026-01-07T17:55:56.250173Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592679937057020939:2536], owner: [1:7592679924172117767:2383], status: SUCCESS, issues: , uid: 61625139-d7e804de-e6129e62-648951e1 2026-01-07T17:55:56.250273Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592679937057020931:2531], status: SUCCESS, compileActor: [1:7592679937057020939:2536] 2026-01-07T17:55:56.250331Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592679937057020931:2531], queryUid: 61625139-d7e804de-e6129e62-648951e1, status:SUCCESS |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true [GOOD] >> VectorIndexBuildTest::CancelBroken >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> KqpCompileFallback::FallbackWithScanQuery [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> TPQTest::TestChangeConfig [GOOD] >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 21012, MsgBus: 1089 2026-01-07T17:55:53.345632Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679920893386632:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:53.345991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:53.568348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:53.568448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:53.615416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:53.657735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:53.661276Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679920893386530:2081] 1767808553340009 != 1767808553340012 2026-01-07T17:55:53.667704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:53.712069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:53.712094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:53.712100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:53.712244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:53.957309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:54.084647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:54.137951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.255757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.361533Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:54.369384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:54.424810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.149060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679933778290293:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.149258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.149672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679933778290303:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.149717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.457829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.531648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.558132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.593708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.623773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.655709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.704627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.744946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:56.813631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679933778291175:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.813729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.814024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679933778291180:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.814116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679933778291181:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.814178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:56.818118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:56.828525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679933778291184:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:55:56.923359Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679933778291235:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:58.342537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679920893386632:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:58.342612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:55:58.475632Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:58.475793Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C7860B09498 2026-01-07T17:55:58.475832Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7592679942368226123:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", keepInCache: 1, split: 0{ TraceId: 01kecskscac4kf1enb9yd1b8v7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTk1NTkyMDctNTI3MTlhYmYtZWFjZGVkOWQtOTFjNmYxOTM=, PoolId: default, IsStreamingQuery: 0} 2026-01-07T17:55:58.476052Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-07T17:55:58.476138Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7592679942368226123:2531], queueSize: 1 2026-01-07T17:55:58.476798Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n 2026-01-07T17:55:58.476861Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7592679942368226123:2531], compileActor: [1:7592679942368226132:2537] 2026-01-07T17:55:58.476942Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-07T17:55:58.477000Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: 
[1:7592679942368226132:2537], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", startTime: 2026-01-07T17:55:58.476893Z 2026-01-07T17:55:58.607740Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7592679942368226132:2537]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767808558","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (0, +∞)\"],\"type\":\"Scan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"ed43375c-b492d262-c9c2f58e-6e55a66f","version":"1.0"} 2026-01-07T17:55:58.608145Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7592679942368226132:2537], duration: 0.131232s 2026-01-07T17:55:58.608189Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7592679942368226132:2537], owner: [1:7592679933778290252:2382], status: SUCCESS, issues: , uid: ed43375c-b492d262-c9c2f58e-6e55a66f 2026-01-07T17:55:58.608321Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7592679942368226123:2531], status: SUCCESS, compileActor: [1:7592679942368226132:2537] 2026-01-07T17:55:58.608505Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" 
}, "rollback_settings": { } } }} 2026-01-07T17:55:58.608628Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7592679942368226123:2531], queryUid: ed43375c-b492d262-c9c2f58e-6e55a66f, status:SUCCESS |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] >> TNebiusAccessServiceTest::PassRequestId [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: 2026-01-07T17:54:58.675314Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:54:58.741608Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T17:54:58.745227Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T17:54:58.745498Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:58.745550Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:58.745585Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-07T17:54:58.745643Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-07T17:54:58.745699Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.745770Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:185:2057] recipient: [1:14:2061] 2026-01-07T17:54:58.762044Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:184:2196], now have 1 active actors on pipe 2026-01-07T17:54:58.762182Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-07T17:54:58.780944Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:54:58.783984Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:54:58.784153Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.785046Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-07T17:54:58.785183Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-07T17:54:58.785603Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-07T17:54:58.785997Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:191:2142] 2026-01-07T17:54:58.788462Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-07T17:54:58.788525Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-07T17:54:58.788572Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:191:2142] 2026-01-07T17:54:58.788637Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-07T17:54:58.788701Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T17:54:58.790169Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:54:58.791414Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-07T17:54:58.791470Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:58.791520Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:54:58.791598Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-07T17:54:58.791632Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:54:58.791668Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:58.791708Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:54:58.791783Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-07T17:54:58.791823Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-07T17:54:58.791860Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-07T17:54:58.791884Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-07T17:54:58.791904Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-07T17:54:58.791934Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:54:58.791990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:54:58.792041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:54:58.792288Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:54:58.792344Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-07T17:54:58.792422Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T17:54:58.792658Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T17:54:58.792854Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:54:58.795652Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T17:54:58.795773Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T17:54:58.795825Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:54:58.795868Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:58.795898Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:54:58.795949Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:54:58.796009Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:54:58.796058Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T17:54:58.796389Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:198:2204], now have 1 active actors on pipe 2026-01-07T17:54:58.806774Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:201:2206], now have 1 active actors on pipe 2026-01-07T17:54:58.806915Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:54:58.806963Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topi ... 
PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 45 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 44 } Consumers { Name: "bbb" Generation: 45 Important: true } Consumers { Name: "ccc" Generation: 45 Important: true } 2026-01-07T17:55:59.201912Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.202410Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|360fb972-3ec226b5-1d45dc98-849e7d76_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:59.209124Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.209638Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|42a60e0e-74c28501-7e0e60b1-8811bba0_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:59.221770Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.222312Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|33c70192-c37ee4b1-59adf915-e3b2f3b1_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:59.621993Z node 41 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 41 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:112:2057] recipient: [41:105:2138] 2026-01-07T17:55:59.684639Z node 41 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:59.684702Z node 41 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:59.684748Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:59.684802Z node 41 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927938 is [41:157:2176] sender: 
[41:158:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:183:2057] recipient: [41:14:2061] 2026-01-07T17:55:59.706042Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:59.707845Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 46 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 46 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "aaa" Generation: 46 Important: true } 2026-01-07T17:55:59.709234Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [41:189:2142] 2026-01-07T17:55:59.712256Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [41:189:2142] 2026-01-07T17:55:59.715514Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [41:190:2142] 2026-01-07T17:55:59.717785Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [41:190:2142] 2026-01-07T17:55:59.720464Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [41:191:2142] 2026-01-07T17:55:59.722660Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [41:191:2142] 2026-01-07T17:55:59.725297Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [41:192:2142] 2026-01-07T17:55:59.727479Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [41:192:2142] 2026-01-07T17:55:59.730084Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [41:193:2142] 2026-01-07T17:55:59.732309Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [41:193:2142] 2026-01-07T17:55:59.744461Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.744832Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dbfcccc4-a95fadc9-1c171e28-6036ffd2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2026-01-07T17:55:59.753354Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:59.760847Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][5][StateInit] bootstrapping 5 [41:237:2142] 2026-01-07T17:55:59.763102Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][5][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [41:237:2142] 2026-01-07T17:55:59.767382Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][6][StateInit] bootstrapping 6 [41:238:2142] 2026-01-07T17:55:59.769746Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][6][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [41:238:2142] 2026-01-07T17:55:59.773701Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][7][StateInit] bootstrapping 7 [41:239:2142] 2026-01-07T17:55:59.776050Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][7][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [41:239:2142] 2026-01-07T17:55:59.780137Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][8][StateInit] bootstrapping 8 [41:240:2142] 2026-01-07T17:55:59.782445Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][8][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [41:240:2142] 2026-01-07T17:55:59.789850Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][9][StateInit] bootstrapping 9 [41:241:2142] 2026-01-07T17:55:59.792217Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][9][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [41:241:2142] 2026-01-07T17:55:59.822410Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 47 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "bbb" Generation: 47 Important: true } Consumers { Name: "ccc" Generation: 47 Important: true } 2026-01-07T17:55:59.824563Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.825184Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|46905dbd-b88efda5-15ece7ad-1482ed4d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:59.831322Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.831687Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d30ae525-517c5681-193a88d9-d0841585_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:59.842232Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:59.842657Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d71b0f19-d9cafe55-c20808e7-2f36e9a8_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2026-01-07T17:56:00.870316Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d2b2cde4750] Connect to grpc://localhost:25127 2026-01-07T17:56:00.888158Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2b2cde4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-01-07T17:56:00.908906Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d2b2cde4750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2026-01-07T17:56:00.909376Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2b2cde4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2026-01-07T17:56:00.912424Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d2b2cde4750] Status 7 Permission Denied 2026-01-07T17:56:00.912797Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2b2cde4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-01-07T17:56:00.914459Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d2b2cde4750] Status 7 Permission Denied 2026-01-07T17:56:00.914863Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2b2cde4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> VectorIndexBuildTest::CancelBroken [GOOD] >> VectorIndexBuildTest::CreateBuildProposeReject ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-07T17:56:00.890623Z node 2 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:83: [7cb1953e3cd0]{reqId} Connect to grpc://localhost:30811 2026-01-07T17:56:00.894334Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb1953e3cd0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-07T17:56:00.909431Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb1953e3cd0]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink |96.5%| [TA] $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TPQTest::TestReadSubscription [GOOD] |96.5%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: 2026-01-07T17:55:06.745582Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:55:06.814829Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:06.814899Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:06.814966Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:06.815024Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:55:06.835665Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:06.860456Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2026-01-07T17:55:06.861860Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:55:06.864358Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:55:06.868570Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:55:06.870031Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:55:06.873299Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [1:192:2142] 
2026-01-07T17:55:06.875025Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:192:2142] 2026-01-07T17:55:06.884894Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:212:2209], now have 1 active actors on pipe 2026-01-07T17:55:06.885043Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:55:06.885095Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:55:06.885386Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-07T17:55:06.885450Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-07T17:55:06.885563Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:55:06.885611Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2026-01-07T17:55:06.885670Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:06.885704Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:06.885735Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:06.885768Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T17:55:06.885799Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:06.885891Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2026-01-07T17:55:06.885990Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:06.886309Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:214:2211], now have 1 active actors on pipe 2026-01-07T17:55:06.886397Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:55:06.886436Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:55:06.886529Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1dd53e1d-35d8d8dc-4d85a931-469fa7ea_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:55:06.886576Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:06.886617Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] 
TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:06.886660Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T17:55:06.886688Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:06.886734Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:06.886823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T17:55:06.886861Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-07T17:55:06.886906Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-07T17:55:06.886980Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-07T17:55:06.887108Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T17:55:06.887329Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:216:2213], now have 1 active actors on pipe 2026-01-07T17:55:06.887406Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-07T17:55:06.887430Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-07T17:55:06.887482Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-07T17:55:06.887512Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-07T17:55:06.887586Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-07T17:55:06.887637Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-07T17:55:06.887661Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:06.943948Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T17:55:06.944027Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-07T17:55:06.944065Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T17:55:06.944145Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:06.944285Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob 
processing sourceId 'sourceid' seqNo 1 partNo 0 2026-01-07T17:55:06.945221Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2026-01-07T17:55:06.945313Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T17:55:06.945356Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2026-01-07T17:55:06.945402Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2026-01-07T17:55:06.945438Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-07T17:55:06.945473Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-07T17:55:06.945525Z node 1 :PERSQU ... n][4][StateInit] bootstrapping 4 [56:193:2142] 2026-01-07T17:56:00.749387Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [56:193:2142] 2026-01-07T17:56:00.761219Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:00.761651Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8bd12822-321b44f-a0005bac-abe6639f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:56:00.790676Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:00.875474Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:00.896981Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:00.993755Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.047934Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.109230Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.210018Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.220720Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-07T17:56:01.366701Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.458425Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.580403Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.756742Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:01.792726Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:01.793348Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fa4e66d-fc1087d2-3488a79b-e4dec33a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:56:01.802667Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:01.803547Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d997c509-3cde7e0-366e49fd-87466776_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:56:02.257815Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:108:2057] recipient: [57:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:108:2057] recipient: [57:106:2138] Leader for TabletID 72057594037927937 is [57:112:2142] sender: [57:113:2057] recipient: [57:106:2138] 2026-01-07T17:56:02.305893Z node 57 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:56:02.305945Z node 57 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:56:02.305983Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:56:02.306027Z node 57 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:154:2057] recipient: [57:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:154:2057] recipient: [57:152:2172] Leader for TabletID 72057594037927938 is [57:158:2176] sender: [57:159:2057] recipient: [57:152:2172] Leader for TabletID 72057594037927937 is [57:112:2142] sender: [57:184:2057] recipient: [57:14:2061] 2026-01-07T17:56:02.327501Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:56:02.329397Z node 57 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 60 actor [57:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 60 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 } Consumers { Name: "user1" Generation: 60 Important: true } 2026-01-07T17:56:02.330632Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [57:190:2142] 2026-01-07T17:56:02.333566Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [57:190:2142] 2026-01-07T17:56:02.337041Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [57:191:2142] 2026-01-07T17:56:02.339474Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [57:191:2142] 2026-01-07T17:56:02.342482Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [57:192:2142] 2026-01-07T17:56:02.345237Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [57:192:2142] 2026-01-07T17:56:02.348100Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [57:193:2142] 2026-01-07T17:56:02.350418Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [57:193:2142] 2026-01-07T17:56:02.353235Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [57:194:2142] 2026-01-07T17:56:02.355501Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [57:194:2142] 2026-01-07T17:56:02.366933Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:02.367237Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3d9fa7ef-59b3fece-ab95a5a0-d46ac0a2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:56:02.384312Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.415906Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.483104Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.525947Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 
local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.611105Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.706911Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.727949Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.813764Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.941688Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:02.962664Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:03.151571Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:03.183584Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:56:03.345471Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:03.346013Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ae356477-b853a328-56a6bfc1-dadc6b24_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-07T17:56:03.353458Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:56:03.353907Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e54f7b22-34594018-bd1937e8-fe8ac385_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:34.409727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:34.409813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:34.409866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:34.409907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:34.409948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:34.409981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:34.410032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:34.410144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:34.410937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:34.411233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:34.497095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:34.497172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:34.512191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:34.512534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:34.512725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:34.525847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:34.526220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:34.526931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:34.527258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:34.529793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:34.529972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:34.531149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:34.531207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:34.531317Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:34.531361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:34.531462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:34.531648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:34.670860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.671820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.671950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:34.672954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1163:3110], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 304, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:56:04.213782Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710766:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710766 msg type: 269090816 2026-01-07T17:56:04.213897Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710766, partId: 4294967295, tablet: 72057594046316545 2026-01-07T17:56:04.214077Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710766, at schemeshard: 72057594046678944 2026-01-07T17:56:04.214120Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 0/1, is published: true 2026-01-07T17:56:04.214161Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710766 at step: 5000047 FAKE_COORDINATOR: advance: minStep5000047 State->FrontStep: 5000046 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710766 at step: 5000047 2026-01-07T17:56:04.214411Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000047, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-07T17:56:04.214521Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710766 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000047 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:56:04.214587Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock 
TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 5000047 2026-01-07T17:56:04.214651Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710766:0 128 -> 240 2026-01-07T17:56:04.216538Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710766:0, at schemeshard: 72057594046678944 2026-01-07T17:56:04.216600Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710766:0 ProgressState 2026-01-07T17:56:04.216692Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-07T17:56:04.216726Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-07T17:56:04.216776Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-07T17:56:04.216807Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-07T17:56:04.216850Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 1/1, is published: true 2026-01-07T17:56:04.216911Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710766 2026-01-07T17:56:04.216982Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-07T17:56:04.217022Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710766:0 2026-01-07T17:56:04.217052Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710766:0 2026-01-07T17:56:04.217127Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710766 2026-01-07T17:56:04.218966Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2026-01-07T17:56:04.219038Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710766 2026-01-07T17:56:04.219111Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710766 2026-01-07T17:56:04.219239Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1163:3110], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 
1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 304, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2026-01-07T17:56:04.220867Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-07T17:56:04.221040Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1163:3110], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 304, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:56:04.221110Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-07T17:56:04.222763Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-07T17:56:04.222918Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1163:3110], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 304, Billed: UploadRows: 0 
UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:56:04.222968Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-07T17:56:04.223115Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-07T17:56:04.223179Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1164:3111] TestWaitNotification: OK eventTxId 103 2026-01-07T17:56:04.223816Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 103 2026-01-07T17:56:04.224201Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::BaseCaseUniq [GOOD] >> IndexBuildTest::CancelBuild >> Channels20::CaIntegrationAgg [GOOD] >> Channels20::CaIntegrationIc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] Test command err: Trying to start YDB, gRPC: 24441, MsgBus: 25164 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:44.552174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:44.664744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:44.681542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:44.681992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:44.682043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:44.947549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:44.947666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:45.029640Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808181983912 != 1767808181983916 2026-01-07T17:49:45.040274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:45.085777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:45.207389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:45.586709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:45.586768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:45.586818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:45.587251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:45.601595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:45.965904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:46.015838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.015963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.016448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.017348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.017746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:46.022568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:46.125878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:926:2785], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:46.231864Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:46.568082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:46.889880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:49:49.431177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 23096, MsgBus: 28604 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:56.657055Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:56.662297Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:56.665816Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:56.666073Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:56.666224Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:56.884294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:56.884394Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:56.921433Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767808193416339 != 1767808193416343 2026-01-07T17:49:56.924710Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:56.973911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:57.135305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:57.595213Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:57.595262Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:57.595289Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:57.595473Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:57.609439Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:57.948187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:57.953835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:57.976196Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.976284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.976340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.977175Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:57.977334Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Data ... 56.413749Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.425726Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.425812Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.425834Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.435992Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.436058Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.436077Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.447825Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.447918Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.447943Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.460618Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.460705Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.460730Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-07T17:55:56.473158Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.473243Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.473270Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.486057Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.486165Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.486194Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.500741Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.500849Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.500876Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.515946Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.516038Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.516064Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.528754Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.528848Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.528876Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.541662Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.541757Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.541783Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.556601Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.556692Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.556720Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-07T17:55:56.647597Z node 20 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsk65y54bfvfpqy5h5egmk", SessionId: ydb://session/3?node_id=20&id=MmIyM2RjZmYtNDE2ZGU3MTgtYzhmNDlkOTUtY2Q5M2JiMWI=, Slow query, duration: 13.331654s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-07T17:55:58.640289Z node 20 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [20:7592679943017842949:4160], TxId: 281474976715668, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsksdbbk609bab7agxsnf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=20&id=ZGYyMTE3ZWItMjY2MTE3N2MtYjAzMjZkNWEtMTE2NTUzNg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. 
Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } 2026-01-07T17:55:58.640406Z node 20 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [20:7592679943017842949:4160], TxId: 281474976715668, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsksdbbk609bab7agxsnf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=20&id=ZGYyMTE3ZWItMjY2MTE3N2MtYjAzMjZkNWEtMTE2NTUzNg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 }. 2026-01-07T17:55:58.640957Z node 20 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=20&id=ZGYyMTE3ZWItMjY2MTE3N2MtYjAzMjZkNWEtMTE2NTUzNg==, ActorId: [20:7592679938722875067:3649], ActorState: ExecuteState, LegacyTraceId: 01kecsksdbbk609bab7agxsnf4, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id# |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2026-01-07T17:55:05.514185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679717146816437:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:05.514601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:05.568717Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679714657345043:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:05.568771Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:05.621681Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:55:05.623062Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:55:05.801022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:05.821112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:05.852173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:05.852287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:05.853262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:05.853326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:05.890729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:05.919854Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:55:05.921359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:05.929639Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:06.033776Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:06.052396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexOjtRNY.tmp 2026-01-07T17:55:06.052422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexOjtRNY.tmp 2026-01-07T17:55:06.052687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexOjtRNY.tmp 2026-01-07T17:55:06.052821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:06.089675Z INFO: TTestServer started on Port 64128 GrpcPort 8239 2026-01-07T17:55:06.121444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:8239 2026-01-07T17:55:06.277984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:06.401216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.537569Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:06.595152Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:08.831370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679730031719606:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:08.831450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679730031719628:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:08.831510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:08.832452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679730031719631:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:08.832530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:08.835679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:08.874964Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679730031719630:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:55:08.935570Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679730031719716:3035] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:09.175812Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679730031719733:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:09.176053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:09.178678Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592679727542247345:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:09.176313Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OTc3YjkyM2YtN2I4ZmY1NDUtNjQ0YTA4NDctY2JhZjRhYjU=, ActorId: [1:7592679730031719597:2329], ActorState: ExecuteState, LegacyTraceId: 01kecsj8wv0qf4tvzsjh59zdfa, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:09.178327Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:55:09.180899Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjljMDZmNDgtYjlhZTEyMTQtNmY0Y2Q3NDItZWNkYjUwZTg=, ActorId: [2:7592679727542247319:2305], ActorState: ExecuteState, LegacyTraceId: 01kecsj8y05kd42djygd2610nw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:09.181506Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' beca ... 
.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:54.984346Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-01-07T17:55:55.001860Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:55.002112Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:55.125620Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:55.129396Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:55.149728Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexXhTqUf.tmp 2026-01-07T17:55:55.149761Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexXhTqUf.tmp 2026-01-07T17:55:55.149918Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038c7/r3tmp/yandexXhTqUf.tmp 2026-01-07T17:55:55.150022Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:55.190589Z INFO: TTestServer started on Port 17525 GrpcPort 21414 PQClient connected to localhost:21414 2026-01-07T17:55:55.430323Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:55.469554Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:55.787343Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:55.821781Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:58.675478Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592679944176046324:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:58.675555Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592679944176046316:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:58.675906Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:58.677364Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592679944176046332:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:58.677603Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:58.680971Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:58.702707Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592679944176046330:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T17:55:58.722648Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:58.771066Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7592679944176046525:3103] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:58.798356Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:58.805477Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [7:7592679944176046552:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:58.805911Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=7&id=MjI1NjQ5MWItY2U3ODZiNzYtYTg2ZjBhMGEtYmI3MmY0MjY=, ActorId: [7:7592679944176046314:2330], ActorState: ExecuteState, LegacyTraceId: 01kecsksjjewe8vy4t33dps7zv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:58.806355Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:55:58.841617Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7592679944219849211:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:58.842433Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=ZjBlOGFhN2QtNjAzNjg4NDgtYzFiY2U2ZWEtYzQ1MjRjNTg=, ActorId: [8:7592679944219849187:2305], ActorState: ExecuteState, LegacyTraceId: 01kecsksphcr6wczeh8zqa90pj, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:58.842916Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:55:58.923452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [7:7592679948471014178:3368] 2026-01-07T17:55:59.776848Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7592679926996175768:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:59.776963Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2026-01-07T17:56:06.279709Z node 7 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [7:7592679978535785608:3580] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-07T17:56:06.279759Z node 7 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [7:7592679978535785608:3580] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> IndexBuildTest::CancelBuild [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQReadAhead |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:32.392108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:32.392233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:32.392295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:32.392342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:32.392387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:32.392438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:32.392508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:32.395310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:32.396359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:32.400183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:32.516212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:32.516311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:32.535008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:32.535436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:32.537002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:32.546502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:32.548989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:32.569781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:32.617347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:32.667207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:32.672806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:32.709746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:32.709873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:32.710020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:32.710086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:32.710234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:32.710452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:32.878676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.882611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.882841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.882931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:32.883938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:56:09.795781Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:127:2152] message: TxId: 281474976710760 2026-01-07T17:56:09.795827Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:56:09.795864Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:56:09.795894Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:56:09.795961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-07T17:56:09.798111Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:56:09.798176Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:56:09.798240Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-07T17:56:09.798361Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1556:3420], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:56:09.800796Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-07T17:56:09.800945Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1556:3420], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:56:09.801006Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-07T17:56:09.802987Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-07T17:56:09.803142Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1556:3420], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:56:09.803185Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-07T17:56:09.803355Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:56:09.803404Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:1655:3508] TestWaitNotification: OK eventTxId 102 2026-01-07T17:56:09.806431Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-07T17:56:09.806731Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: 
STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-07T17:56:09.809963Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:56:09.810265Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 338us result status StatusSuccess 2026-01-07T17:56:09.810778Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:56:09.813064Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:56:09.813340Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 293us result status StatusPathDoesNotExist 2026-01-07T17:56:09.813535Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] >> KqpBatchUpdate::ManyPartitions_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2026-01-07T17:55:24.631899Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:24.632035Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:24.651730Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:24.652158Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:24.689803Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:24.690484Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=8908112459133735398, session=0, seqNo=0) 2026-01-07T17:55:24.690673Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:24.703152Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=8908112459133735398, session=1) 2026-01-07T17:55:24.703980Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-07T17:55:24.704153Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:24.704240Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:24.716493Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-07T17:55:24.716862Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:24.728802Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-07T17:55:24.729299Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=14041605888880804792, name="Lock1") 2026-01-07T17:55:24.729408Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=14041605888880804792) 2026-01-07T17:55:25.043453Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:55:25.043564Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:55:25.061915Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:55:25.062197Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:55:25.087149Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:55:25.087936Z node 2 
:KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=10418794970027282673, session=0, seqNo=0) 2026-01-07T17:55:25.088082Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:55:25.114071Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=10418794970027282673, session=1) 2026-01-07T17:55:25.114466Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=12833346576493698062, session=0, seqNo=0) 2026-01-07T17:55:25.114594Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:55:25.126980Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=12833346576493698062, session=2) 2026-01-07T17:55:25.128206Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-07T17:55:25.128362Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-07T17:55:25.128475Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-07T17:55:25.141843Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=111) 2026-01-07T17:55:25.142133Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-07T17:55:25.142261Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-07T17:55:25.142323Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-07T17:55:25.157919Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2161], cookie=112) 2026-01-07T17:55:25.158281Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-07T17:55:25.158572Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-07T17:55:25.171586Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=222) 2026-01-07T17:55:25.171669Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=223) 2026-01-07T17:55:25.171982Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-07T17:55:25.172345Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=334, session=2, semaphore="Lock2" 
count=18446744073709551615) 2026-01-07T17:55:25.185556Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=333) 2026-01-07T17:55:25.185640Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=334) 2026-01-07T17:55:25.637280Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:25.653943Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:26.043075Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:26.055817Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:26.415222Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:26.427541Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:26.778957Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:26.790823Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:27.165109Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:27.181754Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:27.536108Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:27.548581Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:27.900211Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:27.913747Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:28.293409Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:28.305586Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:28.671539Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:28.685625Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:29.088024Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:29.105119Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:29.486477Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:29.500187Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:29.854297Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:29.866514Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:30.232765Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:30.246474Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:30.626102Z node 2 :KESUS_TABLET 
DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:30.641846Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:31.066968Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:31.081711Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:31.456881Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:31.469307Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:31.843011Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:31.857960Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:32.237255Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:32.249623Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:32.612539Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:32.629000Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:33.013422Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:55:33.027284Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:55:33.405400Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... -07T17:56:06.924651Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:07.318368Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:07.330431Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:07.690246Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:07.702704Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:08.063620Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:08.076161Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:08.340801Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:08.353473Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:08.719430Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:08.733407Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:09.144534Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:09.156987Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:09.523705Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:09.536021Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-07T17:56:09.912111Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:09.924544Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:10.265839Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:10.278265Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:10.640467Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:10.654005Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:11.035576Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:11.049926Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:11.405534Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:11.417849Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:11.769308Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:11.781548Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:12.134571Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:12.147079Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:12.507907Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-07T17:56:12.520285Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-07T17:56:12.990007Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2026-01-07T17:56:12.990139Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-07T17:56:13.002547Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2026-01-07T17:56:13.024360Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:599:2536], cookie=18237132232285995874) 2026-01-07T17:56:13.024484Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:599:2536], cookie=18237132232285995874) 2026-01-07T17:56:13.025057Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:602:2539], cookie=7506305976683294305) 2026-01-07T17:56:13.025134Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:602:2539], cookie=7506305976683294305) 2026-01-07T17:56:13.025614Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:605:2542], cookie=14714067899014411761, name="Lock1") 2026-01-07T17:56:13.025696Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:605:2542], cookie=14714067899014411761) 2026-01-07T17:56:13.026274Z 
node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:608:2545], cookie=8945512080385466719, name="Lock1") 2026-01-07T17:56:13.026349Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:608:2545], cookie=8945512080385466719) 2026-01-07T17:56:13.530434Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-07T17:56:13.530524Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-07T17:56:13.546406Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-07T17:56:13.546543Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-07T17:56:13.583199Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-07T17:56:13.583753Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=6509268713003486896, session=0, seqNo=0) 2026-01-07T17:56:13.583914Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-07T17:56:13.596430Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=6509268713003486896, session=1) 2026-01-07T17:56:13.596792Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=8740313395029313127, session=0, seqNo=0) 2026-01-07T17:56:13.596969Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-07T17:56:13.609134Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=8740313395029313127, session=2) 2026-01-07T17:56:13.609445Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=13945716020292228417, session=0, seqNo=0) 2026-01-07T17:56:13.609579Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-07T17:56:13.621887Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=13945716020292228417, session=3) 2026-01-07T17:56:13.622395Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=4996021046667518338, name="Sem1", limit=3) 2026-01-07T17:56:13.622554Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-07T17:56:13.639092Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=4996021046667518338) 2026-01-07T17:56:13.639471Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-07T17:56:13.639630Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-07T17:56:13.639817Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, 
semaphore="Sem1" count=2) 2026-01-07T17:56:13.639998Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-07T17:56:13.652252Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-01-07T17:56:13.652337Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-01-07T17:56:13.652366Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2026-01-07T17:56:13.652961Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=12326920057504258386, name="Sem1") 2026-01-07T17:56:13.653066Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=12326920057504258386) 2026-01-07T17:56:13.653545Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=2059530045163171216, name="Sem1") 2026-01-07T17:56:13.653630Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=2059530045163171216) 2026-01-07T17:56:13.653902Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=444, name="Sem1") 2026-01-07T17:56:13.654010Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-07T17:56:13.654080Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-07T17:56:13.654133Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-07T17:56:13.666371Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=444) 2026-01-07T17:56:13.667042Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=10466132750656582997, name="Sem1") 2026-01-07T17:56:13.667143Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=10466132750656582997) 2026-01-07T17:56:13.667586Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=1671359310200291269, name="Sem1") 2026-01-07T17:56:13.667655Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=1671359310200291269) |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |96.6%| [TA] 
$(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 20958, MsgBus: 11883 2026-01-07T17:53:35.945674Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679329906034735:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:35.947012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:36.140617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:36.167603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:36.167720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:36.231399Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:36.235664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:36.338990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:36.339010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:36.339041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:36.339166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:36.388374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:36.764128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:36.773854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:53:36.839844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.949260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:36.967466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.116250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.168991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.196704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679347085905666:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.196823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.197360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679347085905676:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.197435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.502665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.537268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.578389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.616568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.649181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.697931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.762060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.805121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.880503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679347085906546:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.880615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.880812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679347085906551:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.880854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679347085906552:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.881004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.885524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:39.899979Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679347085906555:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:39.998836Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679347085906608:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:40.944873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679329906034735:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.944972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:41.624481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 8Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592679951365922920:2124];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:00.456703Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:56:00.475536Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:56:00.596432Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7592679951365922824:2081] 1767808560448372 != 1767808560448375 2026-01-07T17:56:00.596710Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:00.615347Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:56:00.615479Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:56:00.640735Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:00.717627Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:56:00.717666Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:56:00.717680Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:56:00.717796Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:00.736121Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:01.462375Z node 12 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:01.567047Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:01.586204Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:01.676407Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:01.919516Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:02.009926Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:05.455398Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592679951365922920:2124];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:05.455506Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:06.309036Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679977135728480:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.309145Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.309469Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679977135728489:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.309573Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.415537Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.462072Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.514169Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.557183Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.606615Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.660906Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.709284Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.786484Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:06.894014Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679977135729374:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.894130Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.894143Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679977135729379:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.894500Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679977135729381:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.894572Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:06.899213Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:06.911465Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592679977135729382:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:56:06.986884Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592679977135729434:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:09.728472Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable >> KqpBatchDelete::ManyPartitions_3 [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> Channels20::CaIntegrationIc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 3617, MsgBus: 18830 2026-01-07T17:53:37.971821Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679337653256331:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:37.971892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:38.028664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:53:38.353023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:38.376773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:38.376871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:38.399677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:38.538353Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:38.544704Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679337653256213:2081] 1767808417937693 != 1767808417937696 2026-01-07T17:53:38.586072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:38.599193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:38.599222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:38.599229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:38.599323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:38.998892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:39.177709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:39.183987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:39.239716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.410095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.578073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.641315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.446237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354833127268:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.446377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.446780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679354833127278:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.446854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:41.787521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.816354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.846010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.877412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.915516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.959963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:41.989850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.060383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:42.132064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679359128095445:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.132130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.132228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679359128095450:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.132332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679359128095452:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.132374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:42.136015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:42.147070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679359128095454:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:42.209178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679359128095505:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:42.979369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679337653256331:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:42.979455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 57594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 14539, MsgBus: 19808 2026-01-07T17:56:04.027790Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592679970969416509:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:04.027865Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:56:04.077444Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:56:04.166386Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:04.174208Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:56:04.174350Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:56:04.203051Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:04.276350Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:56:04.276371Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:56:04.276379Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:56:04.276470Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:04.347678Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:05.035113Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:05.055245Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:05.070969Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:05.142184Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:05.344696Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:05.440189Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.028154Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592679970969416509:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:09.028237Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:09.590680Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679992444254836:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:09.590861Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:09.591367Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679992444254845:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:09.591479Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:09.721054Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.770693Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.822872Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.900669Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.948434Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:09.994084Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:10.043884Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:10.116036Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:10.324849Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679996739223027:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:10.325015Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:10.325426Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679996739223033:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:10.325500Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592679996739223032:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:10.325517Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:10.331650Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:10.350582Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592679996739223036:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:56:10.445306Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592679996739223090:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:13.074286Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::CaIntegrationIc [GOOD] Test command err: Trying to start YDB, gRPC: 4874, MsgBus: 5869 2026-01-07T17:55:13.979255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679749988303204:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:13.979313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:14.035817Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679756169653576:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:14.035845Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:14.202198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:14.229069Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:14.420578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:14.420723Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:14.427562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:14.427685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:14.437307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:14.439609Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:55:14.442473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:14.443554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:14.454007Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:14.463195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:14.660790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:14.660822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:14.660828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:14.660882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:14.854808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:14.961452Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:14.993723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:15.057219Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:15.174635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:15.246122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.430648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.588695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:15.680377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.519419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679767168174871:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.519560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.520005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679767168174881:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.520074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:17.900539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:17.966178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.029258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.080357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.179250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.244197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.293368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.364952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:18.526435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679771463143339:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:18.526502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:18.526616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679771463143344:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:18.526673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679771463143346:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:18.526711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadSer ... opose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:07.389769Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:07.473671Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:07.473785Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:07.482371Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:07.493080Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:07.496205Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:07.607354Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:07.724069Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.355596Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680000235827107:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:11.355731Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:11.356191Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680000235827116:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:11.356295Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:11.453085Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.465296Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592679978760988485:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:11.465367Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:11.476449Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592679976572746535:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:11.476526Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:11.483296Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7592679978317620122:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:11.483382Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:11.490831Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7592679978118739020:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:11.490931Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:11.518252Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.587564Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.670798Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.749623Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.831455Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.897068Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:11.965426Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:12.162365Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680004530795438:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:12.162484Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:12.162668Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680004530795443:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:12.162735Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680004530795445:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:12.163102Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:12.167382Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:12.190404Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592680004530795447:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:56:12.289175Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592680004530795523:4609] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:14.798443Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:56:16.592577Z node 13 :KQP_CHANNELS WARN: dq_channel_service.cpp:1505: NODE RECONCILIATION x2, to NodeId=11, NodeActorId=[13:7592680021267294098:2423], Gen=1.1, Next Delay=0.200000s 2026-01-07T17:56:16.742932Z node 14 :KQP_CHANNELS WARN: dq_channel_service.cpp:1505: NODE RECONCILIATION x2, to NodeId=11, NodeActorId=[14:7592680021068412953:2422], Gen=1.1, Next Delay=0.200000s 2026-01-07T17:56:16.797448Z node 13 :KQP_CHANNELS WARN: dq_channel_service.cpp:1505: NODE RECONCILIATION x3, to NodeId=11, NodeActorId=[13:7592680021267294098:2423], Gen=1.1, Next Delay=0.400000s |96.6%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools |96.6%| [TA] $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId 2026-01-07 17:56:23,230 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 17:56:23,462 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 338689 58.6M 58.5M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/ec9d/002d2b/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.ar 339593 1.7G 1.7G 1.2G └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/ec9d/002d2b/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_s Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:46:27.977228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:28.060707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:28.067275Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:46:28.067621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:46:28.067734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:46:28.458656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:28.563211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:28.563360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:28.598756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:28.684320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:46:29.340773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:46:29.341672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:46:29.341713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:46:29.341744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:46:29.342172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:46:29.407458Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:46:29.961857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:46:32.968501Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:46:32.977405Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:46:32.981274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:46:33.010401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:46:33.010521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:33.050727Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:46:33.052403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:33.229229Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:46:33.229322Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:46:33.230624Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.231246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.231739Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.232390Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.232872Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.233075Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.233362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.233569Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.233707Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:46:33.248397Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:46:33.511990Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:46:33.541187Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:46:33.541321Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:46:33.586156Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:46:33.587921Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:46:33.588184Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:46:33.588253Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:46:33.588321Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:46:33.588399Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:46:33.588526Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:46:33.588608Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:46:33.590010Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:46:33.610721Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.610845Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2129:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:46:33.642637Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2201:2643] 2026-01-07T17:46:33.642874Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2201:2643], schemeshard id = 72075186224037897 2026-01-07T17:46:33.708581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2252:2664] 2026-01-07T17:46:33.713501Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:46:33.725444Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Describe result: PathErrorUnknown 2026-01-07T17:46:33.725512Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Creating table 2026-01-07T17:46:33.725613Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:46:33.734512Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:46:33.738992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:46:33.747731Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:46:33.747878Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Subscribe on create table tx: 281474976720657 2026-01-07T17:46:33.763590Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:46:33.829089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:46:34.006332Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:46:34.389673Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:46:34.515835Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:46:34.515944Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2258:2669] Owner: [2:2257:2668]. Column diff is empty, finishing 2026-01-07T17:46:35.273849Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 5102:4510], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:53:28.158622Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5102:4510], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWQ5YjgxM2YtZTQzMGVjZDktM2ZiMTk2N2QtNjgzNWNmZGU=, TxId: 2026-01-07T17:53:28.158851Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:53:28.163435Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:53:28.163580Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], Start read next stream part 2026-01-07T17:53:28.234272Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [2:5223:4619]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:53:28.234655Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2026-01-07T17:53:28.234704Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 9, ReplyToActorId = [2:5223:4619], StatRequests.size() = 1 2026-01-07T17:54:21.079588Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsf6k340cefse2pf063xm7", SessionId: ydb://session/3?node_id=2&id=NGI1NGNjZDMtZDE1YmFhYTctMjhlMjMwOWMtYzczOTg4MmI=, Slow query, duration: 52.911360s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { 
return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:54:21.082426Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:54:21.082585Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], Start read next stream part 2026-01-07T17:54:21.083062Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35100, txId: 18446744073709551615] shutting down 2026-01-07T17:54:21.083288Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:54:21.083347Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5100:2464], ActorId: [2:5206:4605], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjQ2YmRhNmYtY2VlYTlmMDAtNjA5ZGVkZmUtYTQ3NmRkNTk=, TxId: 2026-01-07T17:54:21.084777Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5314:4704], ActorId: [2:5318:4706], Starting query actor #1 [2:5319:4707] 2026-01-07T17:54:21.084842Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5318:4706], ActorId: [2:5319:4707], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:54:21.090176Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5318:4706], ActorId: [2:5319:4707], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NzM3NTk0ODktZTk3YTExMzItZTgwYTg5ZmQtZmE0ODAwYzU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:54:21.149211Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5318:4706], ActorId: [2:5319:4707], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzM3NTk0ODktZTk3YTExMzItZTgwYTg5ZmQtZmE0ODAwYzU=, TxId: 2026-01-07T17:54:21.149298Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5318:4706], ActorId: [2:5319:4707], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzM3NTk0ODktZTk3YTExMzItZTgwYTg5ZmQtZmE0ODAwYzU=, TxId: 2026-01-07T17:54:21.149782Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5314:4704], ActorId: [2:5318:4706], Got response [2:5319:4707] SUCCESS 2026-01-07T17:54:21.150138Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:54:21.218973Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:54:21.219050Z node 2 :STATISTICS DEBUG: 
tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId3, ActorId=[1:3166:3440] 2026-01-07T17:54:22.212510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T17:54:22.214347Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:54:22.217627Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-07T17:54:22.217761Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], Start read next stream part 2026-01-07T17:54:22.273808Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 10 ], ReplyToActorId[ [2:5400:4760]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:22.274219Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 10 ] 2026-01-07T17:54:22.274270Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 10, ReplyToActorId = [2:5400:4760], StatRequests.size() = 1 2026-01-07T17:55:33.155971Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsgvc92p09912qntp72f68", SessionId: ydb://session/3?node_id=2&id=OTViMGU3NzgtYmIyNzIzZmUtN2VjYzVlMS1hM2IwYzRlMg==, Slow query, duration: 70.934434s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:55:33.156968Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:55:33.157117Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], Start read next stream part 2026-01-07T17:55:33.157484Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 36000, txId: 18446744073709551615] shutting down 2026-01-07T17:55:33.157565Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5492:4844], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:55:33.160143Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:55:33.160208Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5389:4750], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTU5YjRiOTYtOWY2MGQ0NjMtZWQ3ZjRjNGEtNmZkYjI4Mjc=, TxId: 2026-01-07T17:55:33.161323Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5492:4844], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:55:33.161450Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5387:2464], ActorId: [2:5492:4844], Start read next stream part 2026-01-07T17:55:33.227257Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 11 ], ReplyToActorId[ [2:5510:4859]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:55:33.227592Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 11 ] 2026-01-07T17:55:33.227636Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 11, ReplyToActorId = [2:5510:4859], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/002d2b/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/002d2b/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 64201, MsgBus: 29708 2026-01-07T17:55:27.841900Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679810974571819:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:27.842101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:28.094033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:28.094172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:28.137453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:28.151102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:28.162794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:28.176889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679810974571792:2081] 1767808527839545 != 1767808527839548 2026-01-07T17:55:28.257460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:28.257487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:28.257508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:28.257593Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:28.337387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:28.666322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:28.851210Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:30.707943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679823859474545:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.708062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.708345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679823859474573:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.708485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679823859474574:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.708632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.712781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:30.722415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679823859474577:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:55:30.826109Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679823859474628:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:30.889429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 19180, MsgBus: 12340 2026-01-07T17:55:32.371696Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679830832117188:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:32.371895Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:32.389807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:32.456699Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:32.459158Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592679830832117069:2081] 1767808532369009 != 1767808532369012 2026-01-07T17:55:32.487709Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:32.487792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:32.494761Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:32.550378Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:32.550395Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:32.550406Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:32.550477Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:32.603112Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:32.885355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:33.375765Z node 2 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:35.246897Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679843717019837:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.246928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679843717019846:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.246966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.247187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679843717019852:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.247244Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.250675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:35.263795Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679843717019851:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:55:35.360037Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679843717019904:2530] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:35.409987Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22638, MsgBus: 5016 2026-01-07T17:55:36.725364Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592679850346362164:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:36. ... =NActors::TEvents::TEvWakeup; 2026-01-07T17:56:18.865232Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7592680008625417580:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:18.865317Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:19.323159Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592680034395222132:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:19.323253Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7592680034395222144:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:19.323341Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:19.332254Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:19.350883Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7592680034395222154:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:56:19.439030Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7592680034395222217:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:19.477652Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:19.613409Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:19.740646Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7592680034395222442:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-07T17:56:19.741461Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=ZTk1MzYzOTktZTlkZTdkNzAtZWQ5ZmUzYzgtMjBjYTNjZTM=, ActorId: [10:7592680034395222440:2355], ActorState: ExecuteState, LegacyTraceId: 01kecsme373ry4r58946aj2n0d, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 4324, MsgBus: 18065 2026-01-07T17:56:21.402150Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7592680043095758479:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:21.402237Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:56:21.456096Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:56:21.574652Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:21.591321Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:56:21.591445Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:56:21.596047Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:21.733136Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:56:21.733166Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:56:21.733179Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:56:21.733289Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:21.738318Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:22.409497Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:22.547023Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:26.405060Z node 11 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592680043095758479:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:26.405164Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:27.076309Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680068865563099:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:27.076454Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:27.077794Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680068865563111:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:27.077870Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680068865563112:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:27.078275Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:27.082634Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:27.095404Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592680068865563115:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-07T17:56:27.173133Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592680068865563166:2547] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:27.232582Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:27.385381Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:27.509278Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7592680068865563403:2358], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-07T17:56:27.512664Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NDA5MGIxYjMtNTg2ODkzYWMtYjExNmNhNWQtMjkyNWZhZjk=, ActorId: [11:7592680068865563401:2357], ActorState: ExecuteState, LegacyTraceId: 01kecsmnps03t4mrrjm1nefad0, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> THiveTest::TestCreateExternalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2026-01-07T17:55:02.679331Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679702065660611:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:02.684273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:02.734131Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679703032411365:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:02.734551Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:02.743502Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:55:02.764907Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T17:55:02.949652Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:02.953445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:02.985324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:02.985433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:02.985958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:02.986001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:03.027260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:03.051015Z node 1 :HIVE WARN: hive_impl.cpp:814: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:55:03.051256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:03.061124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:03.173452Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:03.182892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexFMZGIg.tmp 2026-01-07T17:55:03.182940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexFMZGIg.tmp 2026-01-07T17:55:03.183108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexFMZGIg.tmp 2026-01-07T17:55:03.183234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:03.227489Z INFO: TTestServer started on Port 20369 GrpcPort 5097 2026-01-07T17:55:03.249704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:5097 2026-01-07T17:55:03.418458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:03.539298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:03.694643Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:03.762456Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:06.062159Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679720212280937:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.062161Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679720212280948:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.062256Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.062491Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679720212280952:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.062535Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:06.069960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:06.098536Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7592679720212280951:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:55:06.178773Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679720212280980:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:06.434505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:06.436123Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592679719245531236:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:06.436877Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZWY0NDYyNTUtMTYxYzNlM2ItZDJlNjlmMTMtZjAxM2FkMDc=, ActorId: [1:7592679719245531185:2327], ActorState: ExecuteState, LegacyTraceId: 01kecsj68127eehbn3p3g43y2q, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:06.439479Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:55:06.437011Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7592679720212280987:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:55:06.439106Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YjhhM2MwNDItNjE1NTRhM2UtZGFiZWZlOWUtYTA4ZGY4ZTM=, ActorId: [2:7592679720212280934:2305], ActorState: ExecuteState, LegacyTraceId: 01kecsj66cb77kr9gjz0vg1av9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:55:06.439451Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' beca ... 968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:15.511966Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:15.541772Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexOzILs4.tmp 2026-01-07T17:56:15.541801Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexOzILs4.tmp 2026-01-07T17:56:15.541970Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0038f2/r3tmp/yandexOzILs4.tmp 2026-01-07T17:56:15.542074Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:15.592137Z INFO: TTestServer started on Port 29307 GrpcPort 25717 2026-01-07T17:56:15.807004Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:25717 2026-01-07T17:56:15.864322Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:15.912887Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:16.132887Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:16.181107Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:20.125223Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592680018142254660:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:20.125326Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:20.141168Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592680018383433016:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:20.141256Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:20.200019Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680039858269876:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:20.200141Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680039858269887:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:20.200229Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:20.204016Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680039858269891:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:20.204113Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:20.211280Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:20.246346Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592680039858269890:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-07T17:56:20.326942Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592680039858269919:2184] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:20.375126Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [12:7592680039858269930:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:56:20.378613Z node 12 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=12&id=NGQ0ZTM1MjgtZTZjYmIyMDUtNGNjYzU2YWYtNTEzMmEzYTE=, ActorId: [12:7592680039858269874:2307], ActorState: ExecuteState, LegacyTraceId: 01kecsmek60t0mfjqzvd4268yy, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:56:20.379907Z node 12 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:56:20.399910Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7592680039617092567:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:56:20.403488Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NjRmODZiMmItZmZiYjZhYjYtZjcxNzUwZTMtMmEyNDZhYWU=, ActorId: [11:7592680039617092533:2331], ActorState: ExecuteState, LegacyTraceId: 01kecsmer6dg80wr5m92ct2mnz, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T17:56:20.403963Z node 11 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T17:56:20.449109Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:20.588225Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:20.803998Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [11:7592680043912060322:3427] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2026-01-07T17:56:28.305653Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7592680073976831756:3641] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-07T17:56:28.305699Z node 11 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [11:7592680073976831756:3641] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] |96.6%| [TA] $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:54:03.130320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:03.211952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:54:03.218514Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:54:03.218854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:54:03.218983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:54:03.620380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:03.725880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:03.726032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:03.761647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:03.839358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:54:04.506314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:04.507655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:04.507708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:04.507758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:04.508017Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:04.577125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:05.136064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:54:07.486615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:07.494298Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:54:07.498250Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:54:07.526247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:54:07.526345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:07.564942Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:54:07.566531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:07.756161Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:07.756303Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:07.758045Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.758762Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.759370Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.760368Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.760481Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.760756Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.760892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.761095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.761331Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:07.777601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:07.949800Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:07.985606Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:54:07.985737Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:54:08.027252Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:54:08.028397Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:54:08.028589Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:54:08.028640Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:54:08.028685Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:54:08.028736Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:54:08.028778Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:54:08.028818Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:54:08.029413Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:54:08.031472Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:54:08.034493Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:54:08.076523Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:54:08.076598Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:54:08.076694Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:54:08.084629Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:54:08.084759Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:54:08.115814Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:54:08.116294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:54:08.119634Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:54:08.159743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:08.173067Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:54:08.173227Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:54:08.185788Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:54:08.388640Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:54:08.447413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:54:08.804141Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:54:08.934972Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:54:08.935070Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:54:09.652822Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... default not found or you don't have access permissions } 2026-01-07T17:54:38.108090Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4117:3716], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:38.108230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:38.115077Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4134:3721], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:54:38.118434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:38.194602Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4116:3715], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-07T17:54:38.448908Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4203:3763], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:54:38.483766Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4202:3762] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:38.775790Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4224:3776]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:54:38.776167Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-07T17:54:38.776261Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4226:3778] 2026-01-07T17:54:38.776356Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4226:3778] 2026-01-07T17:54:38.776756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4227:3779] 2026-01-07T17:54:38.776916Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4227:3779], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-07T17:54:38.777022Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-07T17:54:38.777196Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4226:3778], server id = [2:4227:3779], tablet id = 72075186224037894, status = OK 2026-01-07T17:54:38.777324Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:54:38.777415Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4224:3776], StatRequests.size() = 1 2026-01-07T17:54:39.762259Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:55:42.487291Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4089:3701], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:55:42.487524Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4089:3701], Start read next stream part 2026-01-07T17:55:42.487695Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecshawe76vdymm3b214x316", SessionId: ydb://session/3?node_id=2&id=NTBjMWNiNWMtMzIwMjNlMDEtZDA4Y2IxYzMtMmI1ZjI5OTI=, Slow query, duration: 64.387205s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:55:42.488983Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-07T17:55:42.489130Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4079:2463], 
ActorId: [2:4336:3878], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:55:42.492471Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-07T17:55:42.492577Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], Start read next stream part 2026-01-07T17:55:42.495168Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4089:3701], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:55:42.495242Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4089:3701], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTlmNDI5ZTAtMTA2ZTY3NzctMTA3NGQ4OGItNmIwZGE5NTQ=, TxId: 2026-01-07T17:55:42.584522Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4363:3901]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:55:42.584764Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:55:42.584813Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4363:3901], StatRequests.size() = 1 2026-01-07T17:56:33.684901Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsk9rw9by37brbbvf5jzvs", SessionId: ydb://session/3?node_id=2&id=MTA3YmIyODQtY2NiZTFmYjEtYjE5NWQwNDEtYWZiOWFlNTg=, Slow query, duration: 51.188266s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-07T17:56:33.687103Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:56:33.687252Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], Start read next stream part 2026-01-07T17:56:33.687663Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-07T17:56:33.688274Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4451:3984], ActorId: [2:4454:3986], Starting query actor #1 [2:4457:3988] 2026-01-07T17:56:33.688352Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4454:3986], ActorId: [2:4457:3988], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T17:56:33.692850Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:56:33.693080Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4079:2463], ActorId: [2:4336:3878], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTkyNjAyNWItMmQyZWI2OGUtZmZjZDk1NDItMzFhZTgxZDA=, TxId: 2026-01-07T17:56:33.694093Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4454:3986], ActorId: [2:4457:3988], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmI4YjBkYy1hZWNkZDdiMC1iNzZlY2Q5Mi1mNjZjNDFjMg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T17:56:33.780858Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4470:4000]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:56:33.781269Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T17:56:33.781326Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4470:4000], StatRequests.size() = 1 2026-01-07T17:56:33.974302Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4454:3986], ActorId: [2:4457:3988], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmI4YjBkYy1hZWNkZDdiMC1iNzZlY2Q5Mi1mNjZjNDFjMg==, TxId: 2026-01-07T17:56:33.974413Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4454:3986], ActorId: [2:4457:3988], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmI4YjBkYy1hZWNkZDdiMC1iNzZlY2Q5Mi1mNjZjNDFjMg==, TxId: 2026-01-07T17:56:33.974966Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4451:3984], ActorId: [2:4454:3986], Got response [2:4457:3988] SUCCESS 2026-01-07T17:56:33.975470Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T17:56:34.013267Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T17:56:34.013411Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3150:3428] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> VectorIndexBuildTest::PrefixedDuplicates [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] |96.6%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2026-01-07T17:50:36.833733Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T17:50:36.865545Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:36.869997Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T17:50:36.880805Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T17:50:36.885467Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T17:50:36.889529Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-07T17:50:36.889653Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T17:50:36.891268Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-07T17:50:36.891317Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T17:50:36.892867Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T17:50:36.893052Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T17:50:36.895188Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-07T17:50:36.895247Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-07T17:50:36.895314Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-07T17:50:36.895352Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T17:50:36.895531Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T17:50:36.896872Z node 1 :TABLET_RESOLVER 
DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:36.910010Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-07T17:50:36.910089Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-07T17:50:36.913561Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.913762Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.913900Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.914059Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.914215Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.914340Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:44:2087] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.914497Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:45:2088] targetNodeId# 1 Marker# DSP01 2026-01-07T17:50:36.914529Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-07T17:50:36.914700Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:36.914853Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:36.956779Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:36.956851Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T17:50:36.957122Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:36.997606Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T17:50:37.001021Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-07T17:50:37.001148Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-07T17:50:37.001175Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-07T17:50:37.001214Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:37.013107Z node 1 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-07T17:50:37.013164Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-07T17:50:37.013341Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-07T17:50:37.013397Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-07T17:50:37.013452Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-07T17:50:37.017618Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-07T17:50:37.022616Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-07T17:50:37.022921Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:37.023064Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.023216Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2026-01-07T17:50:37.023244Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:51:2091] 2026-01-07T17:50:37.023351Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:50:37.023415Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:51:2091] 2026-01-07T17:50:37.028700Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-07T17:50:37.028782Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:37.028890Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:50:37.033233Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-07T17:50:37.033468Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-07T17:50:37.036997Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-07T17:50:37.037924Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.038245Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: 
"SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-07T17:50:37.038471Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T17:50:37.038828Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-07T17:50:37.038880Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-07T17:50:37.038967Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:50:37.046464Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-07T17:50:37.046537Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-07T17:50:37.046580Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-07T17:50:37.046681Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-07T17:50:37.051759Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-07T17:50:37.051845Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 202 ... 
153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [153:271:2264] 2026-01-07T17:56:31.272801Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [153:271:2264] 2026-01-07T17:56:31.272888Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [153:271:2264] 2026-01-07T17:56:31.273162Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [153:271:2264] 2026-01-07T17:56:31.273266Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [153:271:2264] 2026-01-07T17:56:31.273498Z node 153 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [153:270:2263] EventType# 268697601 2026-01-07T17:56:31.273882Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2026-01-07T17:56:31.274037Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:56:31.275171Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2026-01-07T17:56:31.275301Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:56:31.275545Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [153:312:2292] 2026-01-07T17:56:31.275601Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [153:312:2292] 2026-01-07T17:56:31.275694Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [153:312:2292] 2026-01-07T17:56:31.275824Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [153:94:2123] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:56:31.275915Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 153 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [153:94:2123] 2026-01-07T17:56:31.276054Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [153:312:2292] 2026-01-07T17:56:31.276143Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [153:312:2292] 2026-01-07T17:56:31.276327Z node 153 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [153:312:2292] 2026-01-07T17:56:31.276657Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [153:312:2292] 2026-01-07T17:56:31.276720Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [153:312:2292] 2026-01-07T17:56:31.276758Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [153:312:2292] 2026-01-07T17:56:31.276900Z node 153 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [153:280:2269] EventType# 268637702 2026-01-07T17:56:31.277507Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-01-07T17:56:31.277648Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:56:31.277979Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T17:56:31.278144Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:56:31.278579Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-07T17:56:31.278709Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T17:56:31.279412Z node 153 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137043581064576}(72075186224037888)::Execute - TryToBoot was not successfull 2026-01-07T17:56:31.279631Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2026-01-07T17:56:31.279791Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T17:56:31.291323Z node 153 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [185eac4b9c06d110] bootstrap ActorId# [153:315:2295] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-07T17:56:31.291531Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [185eac4b9c06d110] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-07T17:56:31.291629Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [185eac4b9c06d110] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-07T17:56:31.291727Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2026-01-07T17:56:31.291805Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2026-01-07T17:56:31.292005Z node 153 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [153:38:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-07T17:56:31.293848Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# 
[72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-07T17:56:31.294024Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-07T17:56:31.294119Z node 153 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-07T17:56:31.294354Z node 153 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-07T17:56:31.294550Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2026-01-07T17:56:31.295074Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:56:31.295308Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:56:31.295429Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:56:31.295492Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:56:31.295589Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.295696Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.295795Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.296527Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [153:319:2298] 2026-01-07T17:56:31.296641Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [153:319:2298] 2026-01-07T17:56:31.296997Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-07T17:56:31.297256Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-07T17:56:31.297506Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-07T17:56:31.297668Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-07T17:56:31.297751Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-07T17:56:31.297868Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.298024Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.298102Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-07T17:56:31.298236Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-07T17:56:31.298437Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [153:319:2298] 2026-01-07T17:56:31.298540Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [153:319:2298] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestOwnership >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] |96.6%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> TPQTest::TestOwnership [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> TPQTest::TestPQCacheSizeManagement >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23516, MsgBus: 21071 2026-01-07T17:55:27.159876Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679813089973103:2117];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:27.159979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:27.468168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:27.533377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:27.533503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:27.545135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:27.607907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:27.688136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:27.879918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:27.879945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:27.880008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:27.880104Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:28.166538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:28.590569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:28.598216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:30.049874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679825974875784:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.050035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.050455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679825974875793:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.050550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.504712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:30.652965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679825974875919:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.653072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.653359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679825974875925:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.653403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679825974875924:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.653527Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:30.680232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:30.692751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679825974875928:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:55:30.792308Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679825974875979:2616] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 2026-01-07T17:55:32.160031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679813089973103:2117];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:32.160128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19603, MsgBus: 28411 2026-01-07T17:55:32.872565Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679832851301998:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:32.877136Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:32.886048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:55:32.976966Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:32.978332Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:33.027054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:33.027139Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:33.030180Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:33.051578Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:33.051598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:33.051605Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:33.051672Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:33.277188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:33.519562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:33.527281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:55:33.876800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:35.698836Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679845736204721:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.698926Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.699228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592679845736204730:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.699289Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.714723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... ROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7592680123123293608:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:45.742197Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:46.451303Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680148893098245:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.451456Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.451889Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680148893098254:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.451948Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.483265Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:46.562847Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680148893098351:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.562971Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.563522Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680148893098357:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.563583Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7592680148893098356:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.563637Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.568703Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:46.581267Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7592680148893098360:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T17:56:46.685262Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7592680148893098411:2600] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7403, MsgBus: 20058 2026-01-07T17:56:48.306478Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7592680159236204263:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:48.306585Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:56:48.332646Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:56:48.458588Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:56:48.458719Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:56:48.459835Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:48.481410Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:48.572637Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:48.589533Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:56:48.589565Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:56:48.589579Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:56:48.589692Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:49.317160Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:49.479550Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:53.306879Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592680159236204263:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:53.306962Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2026-01-07T17:56:53.326014Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680180711041606:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.326144Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.326839Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680180711041616:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.326928Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.426571Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:53.529836Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680180711041714:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.529952Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.530387Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680180711041719:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.530483Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680180711041720:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.530936Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:53.535905Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:53.551127Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592680180711041723:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:56:53.608541Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592680180711041774:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [GOOD] >> VectorIndexBuildTest::Shard_Build_Error >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> KqpExplain::AggGroupLimit [GOOD] >> KqpBatchUpdate::ManyPartitions_3 [GOOD] >> KqpExplain::ComplexJoin |96.6%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> VectorIndexBuildTest::Shard_Build_Error [GOOD] |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 16178, MsgBus: 4125 2026-01-07T17:53:35.385377Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679328735419321:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:35.387721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:35.643657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:35.677414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:35.677525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:35.739012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:35.740508Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679328735419293:2081] 1767808415382549 != 1767808415382552 2026-01-07T17:53:35.760042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:35.806649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:35.852739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:35.852760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:35.852766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:35.852847Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-07T17:53:36.306451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:36.366646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.420419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:36.511938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.651255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:36.724130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.710959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679341620323055:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:38.711066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:38.711842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679341620323065:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:38.711902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.013383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.049293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.080422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.114988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.146460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.199025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.239529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.277385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:39.351148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679345915291231:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.351244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.351510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679345915291237:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.351522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679345915291236:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.351580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:39.355263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:39.369857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679345915291240:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:39.447057Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679345915291293:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:40.385602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679328735419321:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.385734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:40.928967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... de 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:56:40.789337Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:56:40.920059Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:40.942265Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:56:40.942358Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:56:40.945232Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7592680125509770652:2081] 1767808600759420 != 1767808600759423 2026-01-07T17:56:40.975106Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:56:41.021140Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:56:41.073192Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:56:41.073218Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:56:41.073229Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:56:41.073346Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:56:41.782875Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:56:42.044422Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:56:42.060627Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:42.167698Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:42.454296Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:42.575113Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:45.769084Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7592680125509770682:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:56:45.769185Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:56:46.640336Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680151279576306:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.640473Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.640862Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680151279576315:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.640954Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:46.785594Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:46.841928Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:46.909766Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:46.964914Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:47.038011Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:47.093861Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:47.152124Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:47.231706Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:47.350595Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680155574544490:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:47.350704Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:47.350712Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680155574544495:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:47.350935Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7592680155574544497:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:47.350999Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:56:47.357989Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:56:47.373574Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7592680155574544498:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:56:47.459604Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7592680155574544558:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:56:50.664286Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:56:55.914254Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:56:55.914290Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Shard_Build_Error [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:36.746058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:36.746147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:36.746203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:36.746253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:36.746299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:36.746328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:36.746402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:36.746479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:36.747433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:36.747687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:36.822785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:36.822860Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:36.836791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:36.837120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:36.837305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:36.845715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:36.846052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:36.846681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:55:36.846951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:36.849217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:36.849388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:36.850521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:36.850573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:36.850688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:36.850730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:36.850833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:36.851022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:36.973135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-07T17:55:36.974144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.974921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.975006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.975082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:36.975153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
r::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1358:3278] ServerId: [6:1362:3281] } 2026-01-07T17:56:59.545805Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T17:56:59.546746Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-07T17:56:59.547034Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-07T17:56:59.547464Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-07T17:56:59.548026Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:1026:2972], Recipient [6:1037:2981]: NKikimr::TEvTablet::TEvTabletDead 2026-01-07T17:56:59.548637Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409549 2026-01-07T17:56:59.548823Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409549 Forgetting tablet 72075186233409549 2026-01-07T17:56:59.551012Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:56:59.551095Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-07T17:56:59.551222Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-07T17:56:59.553599Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:56:59.553661Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-07T17:56:59.554121Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-07T17:56:59.554295Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1291:3222], Recipient [6:856:2820]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2026-01-07T17:56:59.554338Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2026-01-07T17:56:59.554370Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409547 state Offline 2026-01-07T17:56:59.554480Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-07T17:56:59.554785Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:56:59.555147Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-07T17:56:59.555195Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 Forgetting tablet 72075186233409547 2026-01-07T17:56:59.556323Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-07T17:56:59.556470Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1364:3283], Recipient [6:856:2820]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1364:3283] ServerId: [6:1366:3285] } 2026-01-07T17:56:59.556506Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T17:56:59.556589Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1291:3222], Recipient [6:865:2827]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2026-01-07T17:56:59.556618Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2026-01-07T17:56:59.556652Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409548 state Offline Forgetting tablet 72075186233409548 2026-01-07T17:56:59.557031Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:839:2808], Recipient [6:856:2820]: NKikimr::TEvTablet::TEvTabletDead 2026-01-07T17:56:59.557489Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409547 2026-01-07T17:56:59.557613Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409547 2026-01-07T17:56:59.559117Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:849:2815], Recipient [6:865:2827]: NKikimr::TEvTablet::TEvTabletDead 2026-01-07T17:56:59.559564Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409548 2026-01-07T17:56:59.559686Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409548 2026-01-07T17:56:59.561253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-07T17:56:59.561547Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:56:59.564289Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-07T17:56:59.564389Z node 6 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-07T17:56:59.567044Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2026-01-07T17:56:59.567203Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:56:59.567267Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:56:59.567386Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-07T17:56:59.567441Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:56:59.567474Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-07T17:56:59.567499Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-07T17:56:59.567528Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-07T17:56:59.567695Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-07T17:56:59.567730Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409547 2026-01-07T17:56:59.567836Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:56:59.567880Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409548 2026-01-07T17:56:59.569981Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:56:59.601545Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-07T17:56:59.601962Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 357 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 357 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:113:2144] 2026-01-07T17:56:08.983770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:56:08.983921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:56:08.983981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:56:08.984019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:56:08.984064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:56:08.984102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:56:08.984180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:56:08.984268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:56:08.985220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:56:08.988303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:56:09.144407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:56:09.144499Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:09.145450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-07T17:56:09.180796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:56:09.181326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:56:09.181543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:56:09.196916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:56:09.197465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:56:09.199272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:56:09.202936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:56:09.220896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:56:09.222269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:56:09.236707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:56:09.236834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:56:09.237030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:56:09.237084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:56:09.237139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:56:09.237358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:56:09.432750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.435966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.436030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.436092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.436184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
RD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-07T17:57:00.036647Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-07T17:57:00.036816Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-07T17:57:00.036867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:57:00.036948Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-07T17:57:00.037011Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:57:00.037061Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-07T17:57:00.037168Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:1379:3204] message: TxId: 281474976710757 2026-01-07T17:57:00.037240Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-07T17:57:00.037301Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-07T17:57:00.037347Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-07T17:57:00.037529Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-07T17:57:00.037585Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-07T17:57:00.037616Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-07T17:57:00.037660Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:57:00.037692Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-01-07T17:57:00.037719Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-07T17:57:00.037790Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-07T17:57:00.038192Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-07T17:57:00.038419Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-07T17:57:00.038611Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths 
in candidate queue, at schemeshard: 72057594046678944 2026-01-07T17:57:00.038685Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-07T17:57:00.038784Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:57:00.038862Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-07T17:57:00.038911Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-07T17:57:00.039330Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-07T17:57:00.039458Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-07T17:57:00.039516Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-07T17:57:00.043911Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-07T17:57:00.044072Z node 16 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-07T17:57:00.737450Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:00.737789Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 376us result status StatusPathDoesNotExist 2026-01-07T17:57:00.737989Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:57:00.738771Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:00.739052Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 308us result status StatusPathDoesNotExist 2026-01-07T17:57:00.739226Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T17:57:00.739888Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:00.740179Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 301us result status StatusSuccess 2026-01-07T17:57:00.740684Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: 2026-01-07T17:54:58.655932Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-01-07T17:54:58.740810Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:58.740887Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:58.740976Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.741039Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-07T17:54:58.764531Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:58.788514Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "aaa" Generation: 1 Important: true } 2026-01-07T17:54:58.789816Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-07T17:54:58.792574Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-07T17:54:58.796309Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-01-07T17:54:58.798321Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-01-07T17:54:58.810451Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:54:58.811010Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a8278fdf-d3733810-8bf9abf2-a80cc8fe_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:182:2194] 2026-01-07T17:54:59.275117Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader 
for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-07T17:54:59.329271Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:59.329330Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:59.329388Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:59.329452Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:181:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:186:2057] recipient: [2:185:2195] Leader for TabletID 72057594037927937 is [2:187:2196] sender: [2:188:2057] recipient: [2:185:2195] 2026-01-07T17:54:59.371176Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:54:59.371234Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:54:59.371286Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:54:59.371354Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! 
new actor is[2:187:2196] 2026-01-07T17:54:59.392532Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.454219Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.485240Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.495753Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.537655Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.589943Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.621486Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.758279Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:54:59.769043Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.083010Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.104433Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.476155Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.723287Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:00.840707Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:187:2196] sender: [2:268:2057] recipient: [2:14:2061] 2026-01-07T17:55:00.974665Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:00.975665Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } 
Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "aaa" Generation: 2 Important: true } 2026-01-07T17:55:00.976998Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:274:2196] 2026-01-07T17:55:00.979936Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:274:2196] 2026-01-07T17:55:00.983528Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:275:2196] 2026-01-07T17:55:00.985578Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:275:2196] 2026-01-07T17:55:01.000585Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:01.001173Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|77db8cc9-52c48d29-a4c9b0a8-e0f1c789_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:179:2192] 2026-01-07T17:55:01.300892Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] Leader for TabletID 72057594037927937 is [3:111:2142] sender: [3:112:2057] recipient: [3:105:2138] 2026-01-07T17:55:01.344702Z node 3 :PQ_TX IN ... 2433Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:781:2555] 2026-01-07T17:57:04.034867Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:782:2555] 2026-01-07T17:57:04.041090Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.041643Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [69:782:2555] 2026-01-07T17:57:04.065814Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.067116Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [69:781:2555] 2026-01-07T17:57:04.091205Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:181:2194] 2026-01-07T17:57:04.130791Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:722:2555] sender: [69:814:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:722:2555] sender: [69:817:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:722:2555] sender: [69:820:2057] recipient: [69:819:2621] Leader for TabletID 72057594037927937 is [69:821:2622] sender: [69:822:2057] recipient: [69:819:2621] 2026-01-07T17:57:04.171535Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:04.171600Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:04.172363Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:04.172415Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:04.173121Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:882:2622] 2026-01-07T17:57:04.175635Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:883:2622] 2026-01-07T17:57:04.185818Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.192880Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [69:883:2622] 2026-01-07T17:57:04.203808Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.213794Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [69:882:2622] 2026-01-07T17:57:04.238212Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:181:2194] 2026-01-07T17:57:04.277240Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:821:2622] sender: [69:917:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:821:2622] sender: [69:920:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:821:2622] sender: [69:923:2057] recipient: [69:922:2692] Leader for TabletID 72057594037927937 is [69:924:2693] sender: [69:925:2057] recipient: [69:922:2692] 2026-01-07T17:57:04.322917Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:04.322982Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:04.323721Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:04.323769Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:04.324466Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:987:2693] 2026-01-07T17:57:04.326944Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:988:2693] 2026-01-07T17:57:04.336868Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.337770Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [69:988:2693] 2026-01-07T17:57:04.355075Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.356180Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [69:987:2693] 2026-01-07T17:57:04.394059Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:181:2194] 2026-01-07T17:57:04.429310Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:924:2693] sender: [69:1020:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:924:2693] sender: [69:1023:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:924:2693] sender: [69:1026:2057] recipient: [69:1025:2763] Leader for TabletID 72057594037927937 is [69:1027:2764] sender: [69:1028:2057] recipient: [69:1025:2763] 2026-01-07T17:57:04.482491Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:04.482552Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:04.483375Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:04.483416Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:04.484099Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1092:2764] 2026-01-07T17:57:04.486418Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1093:2764] 2026-01-07T17:57:04.496248Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.504458Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [69:1093:2764] 2026-01-07T17:57:04.514855Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.515889Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [69:1092:2764] 2026-01-07T17:57:04.545252Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:181:2194] 2026-01-07T17:57:04.586707Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:1027:2764] sender: [69:1127:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:1027:2764] sender: [69:1130:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:1027:2764] sender: [69:1133:2057] recipient: [69:1132:2838] Leader for TabletID 72057594037927937 is [69:1134:2839] sender: [69:1135:2057] recipient: [69:1132:2838] 2026-01-07T17:57:04.626076Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:04.626123Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:04.626722Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:04.626754Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:04.627344Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1201:2839] 2026-01-07T17:57:04.629338Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1202:2839] 2026-01-07T17:57:04.636212Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.637007Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [69:1202:2839] 2026-01-07T17:57:04.665991Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:04.666953Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [69:1201:2839] 2026-01-07T17:57:04.694400Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpBatchDelete::ManyPartitions_1 [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> YdbSdkSessions::MultiThreadSync >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 
[GOOD] Test command err: Trying to start YDB, gRPC: 1348, MsgBus: 19911 2026-01-07T17:53:26.999131Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679291132423804:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.999205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.293087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.338990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.339652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.397174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.499899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.508297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.745527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:27.745568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.745577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.745657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.017117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.412762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.421721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.513335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.689946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.866877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.949123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.252908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679308312294826:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.253024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.253342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679308312294836:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.253382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.777531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.817518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.853950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.889069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.920611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.958095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.999670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.045697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.139110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312607263001:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.139170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.139294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312607263006:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.139302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679312607263008:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.139330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.142716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.156507Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679312607263010:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.235825Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679312607263061:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.999559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679291132423804:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.999638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:32.939311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 6Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7592680210238117146:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:00.383168Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:57:00.425056Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:57:00.612802Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:57:00.612969Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:57:00.613761Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7592680210238117120:2081] 1767808620379887 != 1767808620379890 2026-01-07T17:57:00.628903Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:57:00.649193Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:57:00.703909Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:57:00.737592Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:57:00.737624Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:57:00.737642Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:57:00.737803Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:57:01.401551Z node 20 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:57:01.866333Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:57:01.883284Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:01.988498Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:02.265328Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:02.477192Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:05.383301Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7592680210238117146:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:05.383424Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:57:07.315447Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680240302890089:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:07.315607Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:07.316224Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680240302890098:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:07.316302Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:07.512076Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.569595Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.630206Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.686158Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.746786Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.811899Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.872311Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.964237Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:08.107867Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680244597858286:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:08.108048Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:08.108519Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680244597858291:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:08.108548Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680244597858292:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:08.108604Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:08.114927Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:57:08.131972Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7592680244597858295:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:57:08.222021Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7592680244597858346:3791] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:57:11.185463Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] Test command err: Trying to start YDB, gRPC: 7728, MsgBus: 3300 ... waiting for SysViewsRoster update finished 2026-01-07T17:49:57.102380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:49:57.218157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:49:57.238127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:49:57.238647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:49:57.238706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:49:57.544159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:49:57.544317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:49:57.631127Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808194147523 != 1767808194147527 2026-01-07T17:49:57.646696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:49:57.700957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:49:57.830671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:49:58.221504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:49:58.221566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:49:58.221613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:49:58.226499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:49:58.240217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:49:58.583222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:49:58.591490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:49:58.622060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.622203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.622286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.623386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.623521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:49:58.628088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:49:58.715213Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:926:2785], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T17:49:58.787699Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:49:59.080154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-07T17:49:59.281172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:49:59.281390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:49:59.281639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:49:59.281764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:49:59.281892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:49:59.282032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:49:59.282183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:49:59.282294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:49:59.282415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:49:59.282527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:49:59.282600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:49:59.282744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:49:59.282874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1108:2908];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:49:59.316842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-07T17:49:59.316953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-07T17:49:59.317098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-07T17:49:59.317153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-07T17:49:59.317354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-07T17:49:59.317397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-07T17:49:59.317508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-07T17:49:59.317566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-07T17:49:59.317623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-0 ... 
p/server/server_builder.cc:396:5 #10 0x0000212eba79 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #11 0x00003ff53625 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #12 0x00003ff579f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #13 0x00004e62b368 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #14 0x00001b4f9b79 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #15 0x00001b4f9b79 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #16 0x00001b659ef6 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #17 0x00001b6209f7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #18 0x00001b6209f7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b6209f7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b6209f7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b6209f7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b61fd4c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #27 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7fd6bc3ebd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce3f29 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce3f29 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce3f29 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce3f29 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce3f29 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021ce3f29 in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021ce3f29 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1569:13 #8 0x000021ce3f29 in 
grpc_core::Server::Start() /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:702:17 #9 0x000021cf304e in grpc_server_start /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1715:37 #10 0x000021d177f7 in grpc::Server::Start(grpc::ServerCompletionQueue**, unsigned long) /-S/contrib/libs/grpc/src/cpp/server/server_cc.cc:1214:3 #11 0x0000212fc4c4 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:445:11 #12 0x0000212eba79 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #13 0x00003ff53625 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003ff579f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e62b368 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b4f9b79 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #17 0x00001b4f9b79 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #18 0x00001b659ef6 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #19 0x00001b6209f7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #20 0x00001b6209f7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #21 0x00001b6209f7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #22 0x00001b6209f7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #23 0x00001b6209f7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #24 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #25 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #26 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #27 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x00001b61fd4c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #29 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #31 0x7fd6bc3ebd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021368121 in grpc_core::internal::StatusAllocHeapPtr(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/gprpp/status_helper.cc:427:25 #2 0x00002149d202 in grpc_core::CallCombiner::Cancel(y_absl::lts_y_20250127::Status) 
/-S/contrib/libs/grpc/src/core/lib/iomgr/call_combiner.cc:233:25 #3 0x0000214439fe in grpc_core::FilterStackCall::CancelWithError(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1037:18 #4 0x00002143f11c in grpc_core::Call::CancelWithStatus(grpc_status_code, char const*) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:366:3 #5 0x000021461223 in grpc_call_cancel_with_status /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3499:30 #6 0x000021d40d46 in grpc::ServerContextBase::TryCancel() const /-S/contrib/libs/grpc/src/cpp/server/server_context.cc:347:7 #7 0x0000212e86dc in NYdbGrpc::TGrpcServiceProtectiable::StopService() /-S/ydb/library/grpc/server/grpc_server.cpp:64:26 #8 0x0000212eeeee in NYdbGrpc::TGRpcServer::Stop() /-S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b39dfe6 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b39dfe6 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b39dfe6 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b659f3e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:70:5 #13 0x00001b659f3e in reset /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:286:7 #14 0x00001b659f3e in ~unique_ptr /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:255:71 #15 0x00001b659f3e in ~TTableDataModificationTester /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:15:7 #16 0x00001b659f3e in ~TIsolationSetting /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:391:11 #17 0x00001b659f3e in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:510:9 #18 0x00001b6209f7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #19 0x00001b6209f7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #20 0x00001b6209f7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #21 0x00001b6209f7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #22 0x00001b6209f7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #23 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #24 0x00001c064559 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #25 0x00001c064559 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #26 0x00001c0325c7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #27 0x00001b61fd4c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #28 0x00001c033d7f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #29 0x00001c05e3bc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #30 0x7fd6bc3ebd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 429042 byte(s) leaked in 5259 allocation(s). 
|96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now |96.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs |96.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:113:2144] 2026-01-07T17:56:08.983779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:56:08.983899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:56:08.983950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:56:08.984000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:56:08.984041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:56:08.984077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:56:08.984151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:56:08.984227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:56:08.985179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:56:08.988340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:56:09.141905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-07T17:56:09.142001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:56:09.142975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:56:09.177639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:56:09.178111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:56:09.180456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:56:09.197059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:56:09.197586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:56:09.199359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:56:09.204005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:56:09.228541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:56:09.228838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:56:09.236707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:56:09.236827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:56:09.237017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:56:09.237078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:56:09.237209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:56:09.237415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:56:09.437225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:56:09.438956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
83647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "3_continuousBackupImpl" TopicPath: "/MyRoot/Table/3_continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409555 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409556 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 50 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:57:18.796349Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:18.796653Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl1" took 354us result status StatusSuccess 2026-01-07T17:57:18.797256Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl1" PathDescription { Self { Name: "IncrBackupImpl1" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:57:18.798514Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:18.798765Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl2" took 272us result status StatusSuccess 2026-01-07T17:57:18.799291Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl2" PathDescription { Self { Name: "IncrBackupImpl2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:57:18.800476Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:18.801288Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl3" took 266us result status StatusSuccess 2026-01-07T17:57:18.801806Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl3" PathDescription { Self { Name: "IncrBackupImpl3" PathId: 49 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: 
false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 49 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: 2026-01-07T17:54:39.438864Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679606806142831:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:39.442349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:39.491840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:39.697097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:39.764114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:39.764212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:39.808691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:39.842264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679606806142800:2082] 1767808479433271 != 1767808479433274 2026-01-07T17:54:39.849487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:40.009065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-07T17:54:40.015671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:40.015694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:40.015704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:40.015796Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:40.059170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:40.297628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:54:40.337085Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.011302s 2026-01-07T17:54:40.355556Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679609043698727:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:40.355625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:40.382402Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:54:40.403638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:40.403712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:40.404670Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:54:40.412130Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:54:40.414913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:40.466667Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:40.467311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468339Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468411Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468486Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: 
HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468528Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468630Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468672Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.468726Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:54:40.507622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:40.507729Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:40.511269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:40.591913Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:54:40.591968Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:54:40.599121Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:40.607551Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:54:40.607596Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:54:40.607774Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:54:40.607803Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:54:40.607821Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:54:40.607852Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:54:40.607886Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:54:40.607920Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:54:40.609029Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:54:40.610053Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7592679609043699183:2277] 2026-01-07T17:54:40.614512Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:54:40.635551Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:7592679609043699266:2305] Owner: [2:7592679609043699257:2304]. Describe result: PathErrorUnknown 2026-01-07T17:54:40.635578Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:7592679609043699266:2305] Owner: [2:7592679609043699257:2304]. 
Creating table 2026-01-07T17:54:40.635645Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7592679609043699266:2305] Owner: [2:7592679609043699257:2304]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:54:40.641436Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:54:40.641487Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:7592679609043699304:2307], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:54:40.641978Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7592679609043699306:2365], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:54:40.644072Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:54:40.655790Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7592679609043699290:2358] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-07T17:54:40.662234Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7592679609043699266:2305] Owner: [2:7592679609043699257:2304]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-07T17:54:40.674127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7592679609043699346:2382] 2026-01-07T17:54:40.684266Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7592679609043699346:2382], schemeshard id = 72075186224037897 2026-01-07T17:54:40.740976Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7592679609043699266:2305] Owner: [2:7592679609043699257:2304]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:54:40.742159Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7592679609043699552:2574], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:54:40.750080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:40.754057Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. Sel ... 
0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:57:09.857663Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:57:09.857701Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:57:09.857721Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:57:09.857819Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:57:09.924512Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:57:10.384079Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:57:10.393029Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:57:10.407399Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:10.491989Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:10.643227Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:57:10.722614Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:10.825229Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:14.594941Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7592680249255537650:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:14.595057Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:57:14.607748Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592680270730375963:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:14.607866Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:14.608388Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592680270730375974:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:14.608440Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:14.762404Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:14.843639Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:14.890707Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:14.988512Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:15.049480Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:15.112268Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:15.175572Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:15.246863Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:15.369857Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592680275025344147:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:15.369979Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:15.370007Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592680275025344152:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:15.370173Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7592680275025344154:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:15.370229Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:15.376068Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:57:15.390496Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7592680275025344155:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:57:15.471943Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7592680275025344209:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] |96.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpBatchDelete::SimplePartitions [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |96.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 17742, MsgBus: 30022 2026-01-07T17:53:29.239466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679305819904200:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:29.239557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:29.434752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:29.459967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:29.460057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:29.500481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:29.542568Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:29.543358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679305819904005:2081] 1767808409230492 != 1767808409230495 2026-01-07T17:53:29.616572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:29.616683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-01-07T17:53:29.616690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:29.616759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:29.671220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:29.985383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:30.035431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.183919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.245265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:30.335323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.401238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.247041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318704807772:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.247440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.248095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318704807782:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.248184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.582927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.617359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.644364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.679452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.712863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.746577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.801906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.841033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:32.926535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318704808658:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.926604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.926702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318704808663:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.926994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679318704808665:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.927022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:32.930244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:32.940907Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679318704808667:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:33.033397Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679322999776014:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:34.239182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679305819904200:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:34.240492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31333, MsgBus: 21799 2026-01-07T17:53:40.919824Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592679351659061713:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:40.920013Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_s ... e 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 8338, MsgBus: 17800 2026-01-07T17:57:06.205790Z node 16 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7592680236603718319:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:06.205844Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:57:06.229180Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:57:06.300107Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:57:06.301464Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7592680236603718291:2081] 1767808626204838 != 1767808626204841 2026-01-07T17:57:06.351101Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:57:06.351182Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:57:06.355451Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:57:06.411335Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:57:06.411364Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:57:06.411373Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:57:06.411497Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:57:06.467300Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:57:06.911415Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:57:06.935095Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.012229Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.182425Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:07.231443Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:57:07.257868Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:11.209056Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7592680236603718319:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:11.209142Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:57:11.724489Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680258078556648:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:11.724593Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:11.724962Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680258078556657:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:11.725032Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:11.835453Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:11.887456Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:11.935755Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.002205Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.049217Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.096111Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.141093Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.199318Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:12.322705Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680262373524840:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:12.322814Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:12.323209Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680262373524845:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:12.323276Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680262373524846:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:12.323319Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:12.327806Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:57:12.340655Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7592680262373524849:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:57:12.435896Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7592680262373524910:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:57:21.294736Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:57:21.294766Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.7%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestPQSmallRead >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27246, MsgBus: 27370 2026-01-07T17:55:29.376274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679819130840614:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:29.376574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:29.593938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:29.599140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:29.599261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:29.682415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:29.719000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-07T17:55:29.721097Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679819130840572:2081] 1767808529374740 != 1767808529374743 2026-01-07T17:55:29.789309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:29.789341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:29.789352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:29.789567Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:29.828912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:30.175247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:30.384490Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:32.263573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.390816Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2026-01-07T17:55:32.442909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.488896Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2026-01-07T17:55:32.538003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.577217Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-07T17:55:32.619776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.659543Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-07T17:55:32.701202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.744766Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2026-01-07T17:55:32.783601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) f f t t 2026-01-07T17:55:32.871947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:32.930333Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2026-01-07T17:55:32.991384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {f,f} {f,f} {t,t} {t,t} 2026-01-07T17:55:33.103304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.189647Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-07T17:55:33.247154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.287335Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-07T17:55:33.335082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.386991Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-07T17:55:33.438906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.504602Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-07T17:55:33.545993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.592207Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-07T17:55:33.641566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-07T17:55:33.742654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:55:33.806523Z node 
1 :READ_TABLE_API WARN: rpc_read_tab ... RROR: schemereq.cpp:624: Actor# [14:7592680309331463913:2543] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:57:23.180420Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:23.560159Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:24.296891Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:504: Get parsing result with error, self: [14:7592680313626431541:2394], owner: [14:7592680309331463807:2311], statement id: 0 2026-01-07T17:57:24.297898Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=YTZhMzQ1OWItMmJhMjM5OTctOWZkMTc3YjctMzA2YWM4ZDg=, ActorId: [14:7592680313626431539:2393], ActorState: ExecuteState, LegacyTraceId: 01kecspd625wg0wnv8tdp17pr1, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Error while parsing query." severity: 1 issues { position { row: 1 column: 1 } message: "alternative is not implemented yet : 34" end_position { row: 1 column: 1 } severity: 1 } } tx_id# trace_id# 2026-01-07T17:57:24.692432Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7592680313626431566:2404], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:56: Error: At function: PgOp
:2:56: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-07T17:57:24.696902Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=OTQ2MGJkMTMtN2MyMTlkMTItM2QzNTk3OGItOWVmNGU3ZQ==, ActorId: [14:7592680313626431563:2402], ActorState: ExecuteState, LegacyTraceId: 01kecspdf40t2pqvw7ypsxnqy7, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 56 } message: "At function: PgOp" end_position { row: 2 column: 56 } severity: 1 issues { position { row: 2 column: 56 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 56 } severity: 1 } } } } } } tx_id# trace_id# 2026-01-07T17:57:24.755943Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7592680313626431578:2410], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:58: Error: At function: PgAnd
:2:68: Error: At function: PgOp
:2:68: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-07T17:57:24.757708Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=MmI5YWM3NGYtYTA4NDU0OTgtODJiNWRjOWUtMWMwODRlOWM=, ActorId: [14:7592680313626431575:2408], ActorState: ExecuteState, LegacyTraceId: 01kecspdkd19ep82tk55ypbvw0, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 58 } message: "At function: PgAnd" end_position { row: 2 column: 58 } severity: 1 issues { position { row: 2 column: 68 } message: "At function: PgOp" end_position { row: 2 column: 68 } severity: 1 issues { position { row: 2 column: 68 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 68 } severity: 1 } } } } } } } tx_id# trace_id# 2026-01-07T17:57:24.787486Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecspdn9cdf2ga1yg8101dwy, Database: /Root, SessionId: ydb://session/3?node_id=14&id=ZjA1ZjY3MDItZWM4ZmI0NWYtY2E2MjIwNS1iMjE4NDhlYg==, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-07T17:57:24.787909Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=ZjA1ZjY3MDItZWM4ZmI0NWYtY2E2MjIwNS1iMjE4NDhlYg==, ActorId: [14:7592680313626431587:2414], ActorState: ExecuteState, LegacyTraceId: 01kecspdn9cdf2ga1yg8101dwy, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-07T17:57:24.852858Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:24.973399Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:25.070241Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7592680317921399053:2440], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2026-01-07T17:57:25.072645Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=MmNjZTllZTgtZTgwYjQ3NjUtNDMxODc4NzgtYmI1NGE5MDk=, ActorId: [14:7592680317921399048:2437], ActorState: ExecuteState, LegacyTraceId: 01kecspdxg26zb0kn1md2r7g7p, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "values have 3 columns, INSERT INTO expects: 2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-07T17:57:25.120893Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7592680317921399065:2446], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2026-01-07T17:57:25.121520Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=OWJjN2ZhMDUtY2E4MGE1ZGQtNzg3NDI4ZS1kZjA2OTRkYQ==, ActorId: [14:7592680317921399062:2444], ActorState: ExecuteState, LegacyTraceId: 01kecspdz25m5ranq3n9qkg34q, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert type: List> to List>" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert \'id\': pgunknown to Optional" end_position { row: 1 column: 1 } severity: 1 } } issues { position { row: 1 column: 1 } message: "Row type mismatch for table: db.[/Root/PgTable2]" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-07T17:57:25.612172Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kecspe0ge7c3mdmbfwps98nw, Database: /Root, SessionId: ydb://session/3?node_id=14&id=Yjg4MTQ2OGQtODEwZmQ5YzgtY2UyOTQ1OTEtMmFkMzlkNjY=, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-07T17:57:25.613503Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=Yjg4MTQ2OGQtODEwZmQ5YzgtY2UyOTQ1OTEtMmFkMzlkNjY=, ActorId: [14:7592680317921399074:2450], ActorState: ExecuteState, LegacyTraceId: 01kecspe0ge7c3mdmbfwps98nw, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-07T17:57:25.673805Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:26.356136Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2026-01-07T17:57:26.403943Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23 folder_id=folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23 iam_token=usr_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23 cloud_account=acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23 2026-01-07T17:56:37.913667Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23]","tx_id":"281474976720685","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.006664Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v]","tx_id":"281474976720691","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.070075Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2]","tx_id":"281474976720692","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.111887Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Data]","tx_id":"281474976720694","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.112217Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/SentTimestampIdx]","tx_id":"281474976720700","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.114074Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/State]","tx_id":"281474976720699","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.114317Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Attributes]","tx_id":"281474976720693","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.114530Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Groups]","tx_id":"281474976720696","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.114709Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Deduplication]","tx_id":"281474976720695","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.114889Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Reads]","tx_id":"281474976720697","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.115050Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Messages]","tx_id":"281474976720698","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:38.539775Z: {"request_id":"a3a55180-20b7d401-5b521d2e-ac5318d5","cloud_id":"CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23"} ======================================== 2026-01-07T17:56:38.676428Z: {"request_id":"a3a55180-20b7d401-5b521d2e-ac5318d5","permission":"ymq.queues.create","id":"16237417115096618774$CreateMessageQueue$2026-01-07T17:56:38.676206Z","idempotency_id":"16237417115096618774$CreateMessageQueue$2026-01-07T17:56:37.959000Z","cloud_id":"CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","masked_token":"*** hidden 
***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:37.959000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3663a678-ebf2-11f0-9806-d00d1b756f23.fifo","resource_id":"0000000000000001011v","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:40.347860Z: {"request_id":"7d56351a-bf1078b-13d4eaee-af873571","permission":"ymq.queues.setAttributes","id":"15533547790310823094$UpdateMessageQueue$2026-01-07T17:56:40.347663Z","idempotency_id":"15533547790310823094$UpdateMessageQueue$2026-01-07T17:56:39.627000Z","cloud_id":"CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:39.627000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3663a678-ebf2-11f0-9806-d00d1b756f23.fifo","resource_id":"0000000000000001011v","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:40.904001Z: {"request_id":"74ca16d0-41fa0234-31e092fa-2dd8caf6","permission":"ymq.queues.setAttributes","id":"3362622040654370299$UpdateMessageQueue$2026-01-07T17:56:40.903859Z","idempotency_id":"3362622040654370299$UpdateMessageQueue$2026-01-07T17:56:40.695000Z","cloud_id":"CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:40.695000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3663a678-ebf2-11f0-9806-d00d1b756f23.fifo","resource_id":"0000000000000001011v","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:41.827059Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/SentTimestampIdx]","tx_id":"281474976720716","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:41.895270Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/State]","tx_id":"281474976720717","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:41.965912Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Reads]","tx_id":"281474976720718","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:41.995847Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Messages]","tx_id":"281474976720719","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.029638Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Groups]","tx_id":"281474976720720","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.070299Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Deduplication]","tx_id":"281474976720721","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.112410Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Data]","tx_id":"281474976720722","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.213020Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2/Attributes]","tx_id":"281474976720723","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.280628Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v/v2]","tx_id":"281474976720724","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:42.306017Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_368537e8-ebf2-11f0-bb0a-d00d1b756f23/0000000000000001011v]","tx_id":"281474976720727","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ================================ ... 
"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.416911Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/3/Infly]","tx_id":"281474976720792","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.474742Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/3/Messages]","tx_id":"281474976720793","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.523586Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/3/MessageData]","tx_id":"281474976720794","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.613230Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/2/SentTimestampIdx]","tx_id":"281474976720795","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.685639Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/2/Infly]","tx_id":"281474976720796","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.726800Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/2/Messages]","tx_id":"281474976720797","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.765059Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/2/MessageData]","tx_id":"281474976720798","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.827242Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/1/SentTimestampIdx]","tx_id":"281474976720799","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.875305Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/1/Infly]","tx_id":"281474976720800","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.910237Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/1/Messages]","tx_id":"281474976720801","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:58.912295Z: {"request_id":"1717e638-515026c-922dbf30-a9e5e163","permission":"ymq.queues.setAttributes","id":"14848542452710240390$UpdateMessageQueue$2026-01-07T17:56:58.912061Z","idempotency_id":"14848542452710240390$UpdateMessageQueue$2026-01-07T17:56:57.255000Z","cloud_id":"CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:57.255000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_403d0589-ebf2-11f0-a7c3-d00d1b756f23","resource_id":"0000000000000003004h","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:58.912668Z: {"request_id":"68d0ead9-71e45752-74b6ded9-c8a3ec1e","permission":"ymq.queues.delete","id":"2304143481365748766$DeleteMessageQueue$2026-01-07T17:56:58.912147Z","idempotency_id":"2304143481365748766$DeleteMessageQueue$2026-01-07T17:56:58.306000Z","cloud_id":"CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:58.306000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_403d0589-ebf2-11f0-a7c3-d00d1b756f23","resource_id":"0000000000000003004h","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:58.960213Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/1/MessageData]","tx_id":"281474976720804","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.046604Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/0/SentTimestampIdx]","tx_id":"281474976720805","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.094561Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/0/Infly]","tx_id":"281474976720806","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.141828Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/0/Messages]","tx_id":"281474976720807","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.190140Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/0/MessageData]","tx_id":"281474976720808","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.256578Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/State]","tx_id":"281474976720809","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.332177Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/Attributes]","tx_id":"281474976720810","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.383746Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/3]","tx_id":"281474976720811","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.434256Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/2]","tx_id":"281474976720814","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.490346Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/1]","tx_id":"281474976720815","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.517251Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4/0]","tx_id":"281474976720816","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.542063Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h/v4]","tx_id":"281474976720817","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.572077Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23/0000000000000003004h]","tx_id":"281474976720818","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:59.595097Z: {"request_id":"68d0ead9-71e45752-74b6ded9-c8a3ec1e","cloud_id":"CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23","subject":"fake_user_sid@as","queue":"0000000000000003004h","resource_id":"0000000000000003004h","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_404acc8e-ebf2-11f0-9b2b-d00d1b756f23"} ======================================== ======================================== |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> IndexBuildTest::CancellationNotEnoughRetriesUniq [GOOD] >> IndexBuildTest::CancellationNoTable |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |96.7%| [TM] 
{BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23 folder_id=folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23 iam_token=usr_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23 cloud_account=acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23 2026-01-07T17:56:47.594755Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23]","tx_id":"281474976720693","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:47.676021Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23/000000000000000103qf]","tx_id":"281474976720699","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:47.703156Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23/000000000000000103qf/v2]","tx_id":"281474976720700","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:47.784877Z: {"request_id":"e117ed0b-b4494bd8-18caa1e9-a58a400","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23"} ======================================== 2026-01-07T17:56:47.981449Z: {"request_id":"e117ed0b-b4494bd8-18caa1e9-a58a400","permission":"ymq.queues.create","id":"11814136264861131001$CreateMessageQueue$2026-01-07T17:56:47.981221Z","idempotency_id":"11814136264861131001$CreateMessageQueue$2026-01-07T17:56:47.631000Z","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:47.631000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3c385d37-ebf2-11f0-b7f6-d00d1b756f23.fifo","resource_id":"000000000000000103qf","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:49.572282Z: {"request_id":"31991500-bb7264c7-5a36d54f-8c9e6f50","permission":"ymq.queues.setAttributes","id":"17634766977298860101$UpdateMessageQueue$2026-01-07T17:56:49.572131Z","idempotency_id":"17634766977298860101$UpdateMessageQueue$2026-01-07T17:56:48.868000Z","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","masked_token":"*** hidden 
***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:48.868000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3c385d37-ebf2-11f0-b7f6-d00d1b756f23.fifo","resource_id":"000000000000000103qf","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:50.154629Z: {"request_id":"20764ef8-210deb07-ea80e97d-c3399af8","permission":"ymq.queues.setAttributes","id":"10122832723957817793$UpdateMessageQueue$2026-01-07T17:56:50.154470Z","idempotency_id":"10122832723957817793$UpdateMessageQueue$2026-01-07T17:56:49.932000Z","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:49.932000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3c385d37-ebf2-11f0-b7f6-d00d1b756f23.fifo","resource_id":"000000000000000103qf","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:56:51.046377Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23/000000000000000103qf/v2]","tx_id":"281474976720715","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:51.062450Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23/000000000000000103qf]","tx_id":"281474976720716","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:56:51.075011Z: {"request_id":"fa47a2a8-839ee1e6-aed0199c-4d5fad0c","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","subject":"fake_user_sid@as","queue":"000000000000000103qf","resource_id":"000000000000000103qf","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23"} ======================================== 2026-01-07T17:56:51.727138Z: {"request_id":"fa47a2a8-839ee1e6-aed0199c-4d5fad0c","permission":"ymq.queues.delete","id":"1634078512426334020$DeleteMessageQueue$2026-01-07T17:56:51.726984Z","idempotency_id":"1634078512426334020$DeleteMessageQueue$2026-01-07T17:56:50.995000Z","cloud_id":"CLOUD_FOR_folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:56:50.995000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3c385d37-ebf2-11f0-b7f6-d00d1b756f23.fifo","resource_id":"000000000000000103qf","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_3c4eccc1-ebf2-11f0-b7a2-d00d1b756f23","component":"ymq"} ======================================== ======================================== run test with cloud_id=CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23 
folder_id=folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23 iam_token=usr_acc_45762836-ebf2-11f0-9df7-d00d1b756f23 cloud_account=acc_45762836-ebf2-11f0-9df7-d00d1b756f23 2026-01-07T17:57:02.951570Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23]","tx_id":"281474976720731","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:57:03.034123Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23/000000000000000306hb]","tx_id":"281474976720737","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:57:03.052960Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23/000000000000000306hb/v4]","tx_id":"281474976720738","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:57:03.112148Z: {"request_id":"938cf7e9-597f85b3-ff0c223b-93e59467","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23"} ======================================== 2026-01-07T17:57:03.919373Z: {"request_id":"938cf7e9-597f85b3-ff0c223b-93e59467","permission":"ymq.queues.create","id":"13684282110538069424$CreateMessageQueue$2026-01-07T17:57:03.919217Z","idempotency_id":"13684282110538069424$CreateMessageQueue$2026-01-07T17:57:02.990000Z","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:57:02.990000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_45696e59-ebf2-11f0-9a87-d00d1b756f23","resource_id":"000000000000000306hb","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:57:04.230087Z: {"request_id":"6e24c499-e340b0b1-65e25f85-daea9f62","permission":"ymq.queues.setAttributes","id":"714437724823319185$UpdateMessageQueue$2026-01-07T17:57:04.229974Z","idempotency_id":"714437724823319185$UpdateMessageQueue$2026-01-07T17:57:04.178000Z","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:57:04.178000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_45696e59-ebf2-11f0-9a87-d00d1b756f23","resource_id":"000000000000000306hb","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:57:05.933984Z: 
{"request_id":"fa524bb6-69fdb733-1c126726-4d9944ec","permission":"ymq.queues.setAttributes","id":"14139864533362436520$UpdateMessageQueue$2026-01-07T17:57:05.933832Z","idempotency_id":"14139864533362436520$UpdateMessageQueue$2026-01-07T17:57:05.242000Z","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:57:05.242000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_45696e59-ebf2-11f0-9a87-d00d1b756f23","resource_id":"000000000000000306hb","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","component":"ymq"} ======================================== 2026-01-07T17:57:06.348522Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23/000000000000000306hb/v4]","tx_id":"281474976720759","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:57:06.373328Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23/000000000000000306hb]","tx_id":"281474976720760","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-07T17:57:06.399114Z: {"request_id":"f688f33b-d624bbd6-355d8c08-59ae4060","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","subject":"fake_user_sid@as","queue":"000000000000000306hb","resource_id":"000000000000000306hb","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23"} ======================================== 2026-01-07T17:57:07.955849Z: {"request_id":"f688f33b-d624bbd6-355d8c08-59ae4060","permission":"ymq.queues.delete","id":"10571487611747851815$DeleteMessageQueue$2026-01-07T17:57:07.955708Z","idempotency_id":"10571487611747851815$DeleteMessageQueue$2026-01-07T17:57:06.297000Z","cloud_id":"CLOUD_FOR_folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-07T17:57:06.297000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_45696e59-ebf2-11f0-9a87-d00d1b756f23","resource_id":"000000000000000306hb","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_45762836-ebf2-11f0-9df7-d00d1b756f23","component":"ymq"} ======================================== ======================================== |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> IndexBuildTest::CancellationNoTable [GOOD] >> IndexBuildTest::CancellationNoTableUniq >> IndexBuildTest::CancellationNoTableUniq [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestReadAndDeleteConsumer |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::CancelBuildUniq >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support |96.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> YdbSdkSessions::TestSessionPool [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> IndexBuildTest::CancelBuildUniq [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> 
YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuildUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:55:38.990461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:55:38.990541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:38.990579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:55:38.990617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:55:38.990663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:55:38.990700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:55:38.990741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:55:38.990830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:55:38.991510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:55:38.991736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:55:39.072627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:55:39.072687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:39.089951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:55:39.090314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:55:39.090518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:55:39.097936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:55:39.098239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:55:39.098929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 
1, at schemeshard: 72057594046678944 2026-01-07T17:55:39.099155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:55:39.102205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:39.102429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:55:39.103748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:55:39.103823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:55:39.103978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:55:39.104039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:55:39.104153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:55:39.104392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:55:39.260369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.261965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:55:39.262560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... fy, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-07T17:57:49.811864Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710760 2026-01-07T17:57:49.811909Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-07T17:57:49.811940Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-07T17:57:49.812004Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-07T17:57:49.812063Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-07T17:57:49.817858Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-07T17:57:49.817934Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-07T17:57:49.818005Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-07T17:57:49.818144Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, 
IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1552:3414], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-07T17:57:49.820186Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-07T17:57:49.820304Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1552:3414], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:57:49.820367Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-07T17:57:49.822267Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-07T17:57:49.822384Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1552:3414], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, 
DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-07T17:57:49.822440Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-07T17:57:49.822598Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:57:49.822646Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1651:3502] TestWaitNotification: OK eventTxId 102 2026-01-07T17:57:49.826450Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-07T17:57:49.826710Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-07T17:57:49.830150Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:57:49.830395Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 276us result status StatusSuccess 2026-01-07T17:57:49.830877Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:57:49.834170Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-07T17:57:49.834411Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 257us result status StatusPathDoesNotExist 2026-01-07T17:57:49.834571Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> KqpBatchUpdate::ManyPartitions_1 [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> 
KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 14995, MsgBus: 19317 2026-01-07T17:53:36.775076Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679334453262805:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:36.776657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:36.999809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:37.037963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:37.038225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:37.052107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:37.117928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:37.180368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:53:37.180393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:37.180400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:37.180484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:37.289566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:37.591126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:37.679942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:37.781125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:37.831985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.006375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:38.096015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.003230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679351633133812:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.003345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.004025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679351633133822:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.004087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.346360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.381109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.413888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.441797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.470990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.511862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.579329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.623241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:40.711852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679351633134693:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.711932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.712491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679351633134699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.712761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679351633134698:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.712791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:40.716745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:40.728201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679351633134702:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:53:40.832481Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679351633134755:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:41.774048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679334453262805:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:41.774123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:53:42.324048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 13616, MsgBus: 65497 2026-01-07T17:53:46.240784Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelive ... ipt_executions 2026-01-07T17:57:37.145054Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7592680364739644209:2081] 1767808656869712 != 1767808656869715 2026-01-07T17:57:37.163802Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:57:37.170212Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:57:37.174837Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:57:37.174994Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:57:37.181805Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:57:37.336662Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:57:37.336693Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:57:37.336702Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:57:37.336847Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:57:37.470725Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:57:37.905565Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:57:38.561339Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:57:38.570482Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:57:38.586391Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:38.726540Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:39.067858Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:39.229635Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:41.872966Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7592680364739644234:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:41.873099Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:57:44.579632Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680399099384490:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:44.579784Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:44.580192Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680399099384500:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:44.580248Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:44.740886Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:44.818790Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:44.898467Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:44.963191Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:45.027459Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:45.104134Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:45.294224Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:45.383366Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:45.529240Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680403394352676:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:45.529554Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:45.530220Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680403394352681:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:45.530323Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7592680403394352682:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:45.530582Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:45.541366Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:57:45.575911Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7592680403394352685:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:57:45.672576Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7592680403394352738:3792] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:57:49.144216Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:52.169051Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:57:52.169108Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: 2026-01-07T17:55:03.618512Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-07T17:55:03.690812Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:55:03.690887Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:55:03.690957Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:03.691013Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:181:2057] recipient: [1:14:2061] 2026-01-07T17:55:03.710813Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:55:03.732839Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "important_user" Generation: 1 Important: true } 2026-01-07T17:55:03.733717Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:187:2142] 2026-01-07T17:55:03.735945Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:187:2142] 2026-01-07T17:55:03.749383Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:55:03.749925Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|65afe240-9aef7585-d4dfefe8-5b6784b0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:179:2192] 2026-01-07T17:55:03.813178Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:03.854666Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:03.881947Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:03.892729Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:03.937277Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:03.979452Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.001887Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.151073Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.172311Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.421988Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 
1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.453259Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.731164Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:04.899672Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:05.015198Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:05.343933Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:05.611622Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:05.905389Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:05.962514Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:06.207104Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:179:2192] 2026-01-07T17:55:06.649231Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:06.930149Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:07.221528Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:07.494394Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:07.546897Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:07.758223Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:08.099884Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:08.364878Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:08.623537Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 
2026-01-07T17:55:08.884897Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:08.969316Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:09.145213Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:09.487819Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:09.784307Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.063305Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.352752Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.404559Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.700463Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:10.941580Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:11.194461Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:11.478543Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:11.646587Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:11.764802Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:55:12.537837Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates f ... 
n't have tx info 2026-01-07T17:57:48.880663Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:48.880755Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:48.880838Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:153:2057] recipient: [36:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:153:2057] recipient: [36:151:2172] Leader for TabletID 72057594037927938 is [36:157:2176] sender: [36:158:2057] recipient: [36:151:2172] Leader for TabletID 72057594037927937 is [36:111:2142] sender: [36:183:2057] recipient: [36:14:2061] 2026-01-07T17:57:48.904650Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:48.905434Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1002 actor [36:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-07T17:57:48.906253Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [36:189:2142] 2026-01-07T17:57:48.909452Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:189:2142] 2026-01-07T17:57:50.083464Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:57:50.084227Z node 36 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|963dc1b3-7b0147ea-929432f-20f61344_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [36:111:2142] sender: [36:261:2057] recipient: [36:103:2137] Leader for TabletID 72057594037927937 is [36:111:2142] sender: [36:264:2057] recipient: [36:263:2258] Leader for TabletID 72057594037927937 is [36:265:2259] sender: [36:266:2057] recipient: [36:263:2258] 2026-01-07T17:57:52.479777Z node 36 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:52.479866Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:52.480599Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:52.480726Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:52.481445Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] 
bootstrapping 0 [36:314:2259] 2026-01-07T17:57:52.512379Z node 36 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:52.513465Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [36:314:2259] 2026-01-07T17:57:52.542866Z node 36 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-07T17:57:52.568718Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:57:52.568882Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [36:265:2259] sender: [36:338:2057] recipient: [36:14:2061] 2026-01-07T17:57:52.573599Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:52.577759Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-07T17:57:52.578085Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1003 actor [36:335:2310] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-07T17:57:52.994762Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:112:2057] recipient: [37:105:2138] 2026-01-07T17:57:53.060839Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:53.060903Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:53.060972Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:53.061028Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927938 is [37:157:2176] sender: [37:158:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:181:2057] recipient: [37:14:2061] 2026-01-07T17:57:53.080835Z node 37 :PERSQUEUE NOTICE: 
pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:53.081485Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1004 actor [37:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2026-01-07T17:57:53.082112Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:187:2142] 2026-01-07T17:57:53.085140Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [37:187:2142] 2026-01-07T17:57:54.315517Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-07T17:57:54.316029Z node 37 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3eae6963-30d46faf-6127e9df-facff8bf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:259:2057] recipient: [37:103:2137] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:262:2057] recipient: [37:261:2256] Leader for TabletID 72057594037927937 is [37:263:2257] sender: [37:264:2057] recipient: [37:261:2256] 2026-01-07T17:57:56.771452Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-07T17:57:56.771515Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-07T17:57:56.772011Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:56.772058Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-07T17:57:56.772580Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:312:2257] 2026-01-07T17:57:56.800223Z node 37 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-07T17:57:56.801185Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [37:312:2257] 2026-01-07T17:57:56.832139Z node 37 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-07T17:57:56.869759Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-07T17:57:56.869887Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [37:263:2257] sender: [37:336:2057] recipient: [37:14:2061] 2026-01-07T17:57:56.875139Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T17:57:56.879159Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-07T17:57:56.879440Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1005 actor [37:333:2308] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |96.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> ExternalIndex::Simple [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:53:59.951592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:00.106413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:54:00.156029Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:316:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:54:00.156400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:54:00.156552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2026-01-07T17:54:00.156784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:54:00.556645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:00.556776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:00.643621Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767808437456829 != 1767808437456833 2026-01-07T17:54:00.661439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:00.708553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:00.819306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-07T17:54:01.765982Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-07T17:54:01.766419Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-07T17:54:01.770528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:01.771653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:01.771793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:01.771822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:01.772216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:01.785452Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:01.941105Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-01-07T17:54:01.941199Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T17:54:01.942597Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:907:2769] 2026-01-07T17:54:02.020515Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:907:2769] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "new_column1" Type: "Uint64" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T17:54:02.020626Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:907:2769] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T17:54:02.023343Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T17:54:02.023488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:907:2769] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T17:54:02.023920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T17:54:02.024117Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:907:2769] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 
72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T17:54:02.024299Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:907:2769] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T17:54:02.031406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-07T17:54:02.033040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T17:54:02.033888Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:907:2769] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T17:54:02.033968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:907:2769] txid# 281474976715657 SEND to# [1:906:2768] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T17:54:02.145680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-07T17:54:02.178223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-07T17:54:02.178509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-07T17:54:02.206345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-07T17:54:02.206765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-07T17:54:02.207061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-07T17:54:02.207188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-07T17:54:02.207326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-07T17:54:02.207439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-07T17:54:02.207566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-07T17:54:02.207690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-07T17:54:02.207804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-07T17:54:02.207937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-07T17:54:02.208036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-07T17:54:02.208160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-07T17:54:02.208277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-07T17:54:02.237513Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-01-07T17:54:02.237860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-07T17:54:02.237918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-07T17:54:02.238111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: ... olumn: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-07T17:57:30.239868Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:30.239964Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716216 ProcessProposeKqpTransaction 2026-01-07T17:57:30.251980Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:30.252052Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716217 ProcessProposeKqpTransaction 2026-01-07T17:57:30.444435Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:30.444507Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716218 ProcessProposeKqpTransaction 2026-01-07T17:57:30.456462Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:30.456536Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716219 ProcessProposeKqpTransaction 2026-01-07T17:57:30.640553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:57:30.640713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:57:30.640779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:57:30.640838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;EXPECTATION=0 2026-01-07T17:57:40.779924Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:11236:10575], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T17:57:40.791972Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZGZmZjMyMWItMmE4Njg4NGEtZTI0NWIzYmItN2ZhY2UwYTA=, ActorId: [1:11232:10572], ActorState: ExecuteState, LegacyTraceId: 01kecspx7pc67kmhyq428e6vj2, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-07T17:57:41.436373Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:41.436471Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716220 ProcessProposeKqpTransaction 2026-01-07T17:57:41.449248Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:41.449321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716221 ProcessProposeKqpTransaction 2026-01-07T17:57:41.577445Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:41.577519Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716222 ProcessProposeKqpTransaction 2026-01-07T17:57:41.587024Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:41.587093Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716223 ProcessProposeKqpTransaction 2026-01-07T17:57:41.774914Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:57:41.775072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:57:41.775138Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:57:41.775197Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;EXPECTATION=0 2026-01-07T17:57:52.383286Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:52.383386Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716224 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;RESULT=;EXPECTATION=1 2026-01-07T17:57:52.966453Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:52.966527Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716225 ProcessProposeKqpTransaction 2026-01-07T17:57:52.976766Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:52.976839Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716226 ProcessProposeKqpTransaction 2026-01-07T17:57:53.144149Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:53.144227Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716227 
ProcessProposeKqpTransaction 2026-01-07T17:57:53.155228Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:57:53.155304Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716228 ProcessProposeKqpTransaction 2026-01-07T17:57:53.397857Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:57:53.398029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:57:53.398096Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-07T17:57:53.398158Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;EXPECTATION=1 2026-01-07T17:58:03.912082Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:58:03.912171Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716229 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 2026-01-07T17:58:04.512712Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:58:04.512793Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716230 ProcessProposeKqpTransaction 2026-01-07T17:58:04.523569Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:58:04.523644Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716231 ProcessProposeKqpTransaction 2026-01-07T17:58:04.675354Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:58:04.675432Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716232 ProcessProposeKqpTransaction 2026-01-07T17:58:04.685533Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-07T17:58:04.685605Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716233 ProcessProposeKqpTransaction 2026-01-07T17:58:04.877959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:961:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-07T17:58:04.878136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-07T17:58:04.878201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 
2026-01-07T17:58:04.878261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> KqpBatchUpdate::SimplePartitions [GOOD] >> TResourcePoolTest::CreateResourcePool >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> TResourcePoolTest::CreateResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 17866, MsgBus: 5288 2026-01-07T17:53:26.997049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679292163033401:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:26.997123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:53:27.337074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:53:27.382385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:53:27.382491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:53:27.466043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:53:27.518618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:53:27.521028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679292163033364:2081] 1767808406985055 != 1767808406985058 2026-01-07T17:53:27.533910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:53:27.742037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-07T17:53:27.742069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:53:27.742075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:53:27.742136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:53:28.018598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:53:28.423074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:53:28.437181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T17:53:28.524815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.710024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.897131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:28.985042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.613231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309342904422:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.613386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.613784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679309342904432:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.613864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:30.937316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:30.981504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.025040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.068329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.101407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.139931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.178840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.213236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:53:31.270953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313637872605:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.271037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.271247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313637872611:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.271314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679313637872610:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.271348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:53:31.274414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:53:31.285477Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679313637872614:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T17:53:31.365588Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679313637872665:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:53:31.997247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679292163033401:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:53:31.997357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12295, MsgBus: 8469 2026-01-07T17:53:41.924386Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7592680422902859865:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:49.170311Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:57:49.248444Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:57:49.350453Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:57:49.353864Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:57:49.353983Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:57:49.357074Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7592680422902859839:2081] 1767808669168252 != 1767808669168255 2026-01-07T17:57:49.392638Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:57:49.508746Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:57:49.514867Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:57:49.514893Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:57:49.514906Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:57:49.515002Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:57:50.102497Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:57:50.117218Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:57:50.130677Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:50.202682Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:57:50.237689Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:50.437876Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:50.547595Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:54.170672Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7592680422902859865:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:57:54.170763Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:57:55.258469Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680448672665508:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.258565Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.258905Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680448672665518:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.258948Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.374420Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.439132Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.487477Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.530648Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.571673Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.625206Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.771457Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.837200Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:57:55.943110Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680448672666411:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.943167Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.943611Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680448672666416:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.943659Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7592680448672666417:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.943951Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:57:55.948226Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:57:55.969197Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7592680448672666420:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:57:56.064693Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7592680452967633767:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:58:04.323353Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T17:58:04.323393Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T17:58:11.053914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T17:58:11.054015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:58:11.054086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T17:58:11.054134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T17:58:11.054170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T17:58:11.054200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T17:58:11.054261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T17:58:11.054337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T17:58:11.055227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T17:58:11.055559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T17:58:11.142186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T17:58:11.142261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-07T17:58:11.159491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T17:58:11.159806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T17:58:11.159975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T17:58:11.165956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T17:58:11.166250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T17:58:11.166906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T17:58:11.167180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T17:58:11.169431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:58:11.169610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T17:58:11.171010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:58:11.171080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:58:11.171216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T17:58:11.171271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T17:58:11.171316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T17:58:11.171497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T17:58:11.299113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.300907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T17:58:11.301491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-07T17:58:11.954987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-07T17:58:11.955135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-07T17:58:11.955322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-07T17:58:11.955400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:58:11.956102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:58:11.957908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:58:11.958101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T17:58:11.959259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T17:58:11.959292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-07T17:58:11.959436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:58:11.959509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-07T17:58:11.959611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T17:58:11.959652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-07T17:58:11.959693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-07T17:58:11.959718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-07T17:58:11.960003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-07T17:58:11.960108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 102:0 ProgressState 2026-01-07T17:58:11.960194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:58:11.960223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:58:11.960256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-07T17:58:11.960282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:58:11.960313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-07T17:58:11.960547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-07T17:58:11.960583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T17:58:11.960610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T17:58:11.960689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-07T17:58:11.960736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-07T17:58:11.960780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-07T17:58:11.960807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-07T17:58:11.962028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:58:11.962110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:58:11.962151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:58:11.962186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-07T17:58:11.962247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-07T17:58:11.963133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:58:11.963200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T17:58:11.963230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-07T17:58:11.963255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-07T17:58:11.963303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-07T17:58:11.963356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-07T17:58:11.965305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-07T17:58:11.965669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T17:58:11.965839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T17:58:11.965893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T17:58:11.966502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T17:58:11.966586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T17:58:11.966640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:751:2740] TestWaitNotification: OK eventTxId 102 2026-01-07T17:58:11.967059Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T17:58:11.967303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 224us result status StatusSuccess 2026-01-07T17:58:11.967683Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] |96.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |96.8%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] >> 
test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.8%| [TA] $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |96.9%| [TA] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... 
results_accumulator.log} |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] >> KqpPg::TempTablesWithCache [GOOD] >> KqpPg::TableDeleteWhere+useSink >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpStats::DeferredEffects+UseSink [FAIL] >> KqpStats::DeferredEffects-UseSink >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink >> KqpStats::DeferredEffects-UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> 
test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21577, MsgBus: 14793 2026-01-07T17:54:10.094315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679480389745713:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.094365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.566567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.566670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.568268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.578527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.829364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.853027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945912Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.108312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.750641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.760408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.809541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.020359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.273508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.345008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.905832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493274649432:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.905938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.906237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679493274649442:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.906272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.297369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.334081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.393174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.426158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.467369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.510258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.578124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.619135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.704260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497569617618:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.704329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.704449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497569617623:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.704490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497569617624:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.704519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.709071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.722550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497569617627:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.827884Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497569617678:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.094673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679480389745713:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.094755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:19.195582Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767808456967, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 13663, MsgBus: 18049 2026-01-07T17:54:20.064078Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExist ...
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 29974, MsgBus: 4112 2026-01-07T17:59:40.767059Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592680896539827510:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:59:40.767660Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:59:40.796848Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:59:40.880372Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:59:40.881570Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7592680896539827254:2081] 1767808780746828 != 1767808780746831 2026-01-07T17:59:40.900440Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:59:40.900539Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:59:40.935004Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:59:40.985410Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:59:41.021610Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:59:41.021635Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:59:41.021644Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:59:41.021735Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:59:41.534781Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:59:41.542712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:59:41.556464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:41.637628Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:41.763246Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:59:41.824897Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:41.907372Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:44.834123Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592680913719698309:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:44.834255Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:44.834842Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592680913719698319:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:44.834892Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:44.919078Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:44.960765Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:44.999486Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.044614Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.142293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.211784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.270605Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.358275Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:45.451925Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592680918014666494:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:45.452023Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:45.452154Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592680918014666499:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:45.452192Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592680918014666501:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:45.452230Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:45.457223Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:59:45.472821Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592680918014666503:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:59:45.536167Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592680918014666554:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:59:45.768570Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7592680896539827510:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:59:45.768704Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 63502, MsgBus: 19213 2026-01-07T17:54:10.093139Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679479992428218:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.093191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.370056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.457461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.457574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.542880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.591116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.611787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:10.945498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:10.945526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:10.945531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:10.945629Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.111208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.764612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.792211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:11.869479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.063929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.279179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.352740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.800871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492877331942:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.801008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.801351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679492877331952:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.801410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.151118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.182395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.216301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.251566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.304108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.337848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.381337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.432468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.530054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497172300129:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.530172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.530446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497172300134:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.530482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679497172300135:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.530570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.533778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.543810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679497172300138:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.604282Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679497172300189:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.091745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679479992428218:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.091801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:16.235749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... atus# CANCELLED issues# { message: "Request canceled after 464ms" severity: 1 }{ message: "Cancelling after 466ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:49.823286Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsttvc0gtg2wz943m4mtqv, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 466ms" severity: 1 }{ message: "Cancelling after 465ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:50.533936Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstvhg74d0r8a7wh7ac1zc, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 468ms" severity: 1 }{ message: "Cancelling after 468ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:51.372826Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstwbkb0nc777k2d1avnqg, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 470ms" severity: 1 }{ message: "Cancelling after 472ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:52.285386Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstx836nqgkv511we36067, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 472ms" severity: 1 }{ message: "Cancelling after 473ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:53.085441Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: 
ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsty10bygzy4kmx393zz12, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 474ms" severity: 1 }{ message: "Cancelling after 476ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:54.064566Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstyzddftr7sp0w0tvyhsb, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 476ms" severity: 1 }{ message: "Cancelling after 482ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:54.560504Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7592680956360602458:2539] TxId: 281474976710812. Ctx: { TraceId: 01kecstzf3fqvm8j8wtcadyre0, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 477ms } {
: Error: Cancelling after 476ms during execution } ] trace_id# 2026-01-07T17:59:54.560670Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592680956360602467:4420], TxId: 281474976710812, task: 3. Ctx: { CheckpointId : . TraceId : 01kecstzf3fqvm8j8wtcadyre0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592680956360602458:2539], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:59:54.560955Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592680956360602468:4421], TxId: 281474976710812, task: 4. Ctx: { CheckpointId : . TraceId : 01kecstzf3fqvm8j8wtcadyre0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7592680956360602458:2539], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:59:54.561088Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7592680956360602466:4419], TxId: 281474976710812, task: 2. Ctx: { CheckpointId : . TraceId : 01kecstzf3fqvm8j8wtcadyre0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7592680956360602458:2539], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-07T17:59:54.561466Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstzf3fqvm8j8wtcadyre0, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 477ms" severity: 1 }{ message: "Cancelling after 476ms during execution" severity: 1 } trace_id# 2026-01-07T17:59:54.561923Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037927 Cancelled read: {[5:7592680956360602470:4419], 1} 2026-01-07T17:59:55.057323Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecstzye4625m42hwg9q93yk, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 478ms" severity: 1 }{ message: "Cancelling after 482ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:56.043314Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv0x98wgg247fk0ah1beh, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 480ms" severity: 1 }{ message: "Cancelling after 480ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:57.014236Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv1vh7cec6tvm3veen2n7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 482ms" severity: 1 }{ message: "Cancelling after 482ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:57.797680Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv2m0330sajwaym27cpw7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 484ms" severity: 1 }{ message: "Cancelling after 484ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:58.482497Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv39adxs677zwj13ptqjt, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 486ms" severity: 1 }{ message: "Cancelling after 486ms during compilation" severity: 1 } trace_id# 2026-01-07T17:59:59.377263Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv4580w3q5emw4dd351dp, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 
488ms" severity: 1 }{ message: "Cancelling after 488ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:00.205297Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv4z02m31bk032r65mcg5, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 490ms" severity: 1 }{ message: "Cancelling after 491ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:01.029344Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv5rq9jcx1g4vvk105w5d, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 492ms" severity: 1 }{ message: "Cancelling after 492ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:01.743857Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv6f08145cztaqpvjnj4h, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 494ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:02.588914Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv79cam0qgadwveqcbs3k, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 496ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:03.346419Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv80zav2jvephtvcqeek7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 498ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } trace_id# 2026-01-07T18:00:04.047018Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NjFlYTE3MmItOGYyZDRkNDAtZGVkODMwZmMtMzYwZTdiNQ==, ActorId: [5:7592680483914195177:2539], ActorState: ExecuteState, LegacyTraceId: 01kecsv8pq3g4bzrq5geb5qq9p, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 500ms" severity: 1 }{ message: "Cancelling after 502ms during compilation" severity: 1 } trace_id# |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> 
test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] >> KqpLimits::QSReplySize-useSink [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7337, MsgBus: 24746 2026-01-07T17:54:10.137395Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679478852186035:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:10.138884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:10.175332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:54:10.450730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.720248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:10.815733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:10.815837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:10.844682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:10.852278Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:10.856284Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679478852186007:2081] 1767808450135696 != 1767808450135699 2026-01-07T17:54:11.025119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:11.025156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:11.025163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:11.025236Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:11.146010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:11.256834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:11.744316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:11.847889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.071897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.318214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:12.458711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:13.730226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491737090810:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.730352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.730904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679491737090820:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:13.730994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.123942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.188168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.253930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.308506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.367072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.448275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.502152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.561710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:14.691738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496032059606:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.691844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.692652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496032059612:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.692663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679496032059611:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.692719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:14.702902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:14.725018Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679496032059615:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:14.812435Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679496032059698:5418] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:15.140561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679478852186035:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:15.140628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline= ... 59:54.334617Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:59:54.517789Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:59:54.517817Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:59:54.517828Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:59:54.517927Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:59:54.529532Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:59:55.125861Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:59:55.247090Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:59:55.261697Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:59:55.279251Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:55.407384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:55.737711Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T17:59:55.856859Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.614540Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592680973029191225:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:58.614638Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:58.615028Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592680973029191234:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:58.615084Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:58.713624Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.763389Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.806659Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.851415Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.902464Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:58.994935Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:59.052033Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:59.094145Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592680955849320272:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:59:59.094224Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:59:59.130332Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:59:59.259188Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592680977324159409:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:59.259301Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:59.259698Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592680977324159415:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:59.259759Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592680977324159414:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:59.259897Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:59:59.265185Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:59:59.279529Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592680977324159418:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:59:59.342945Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592680977324159469:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:00:01.760222Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:09.324570Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-07T18:00:09.324600Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:00:14.966185Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZDVjODVmOGUtODVmMDZlMGYtZTYwNDM0ZTYtY2QyYTExNWI=, ActorId: [5:7592681033158735585:2709], ActorState: ExecuteState, LegacyTraceId: 01kecsvhg57tcdaqbgwxrwvc2y, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Intermediate data materialization exceeded size limit (88240889 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data." issue_code: 2013 severity: 1 } trace_id#
: Error: Intermediate data materialization exceeded size limit (88240889 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> PgCatalog::PgTables [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 10918, MsgBus: 6979 2026-01-07T17:55:31.581692Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679829604225780:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:31.581982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:31.809073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:31.833119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:31.833240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:31.890299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:31.922725Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:32.012404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:32.012422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-07T17:55:32.012431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:32.012534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:32.112460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:32.414997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:32.420684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2026-01-07T17:55:32.591974Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:34.916659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2026-01-07T17:55:35.072064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679846784095830:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.072170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.072733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679846784095842:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.072788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679846784095843:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.073084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:55:35.078019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:55:35.091986Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679846784095846:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T17:55:35.227262Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679846784095897:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:55:35.607002Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-07T17:55:35.609024Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-07T17:55:35.611365Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7592679846784095943:2332] TxId: 281474976710663. Ctx: { TraceId: 01kecsk2gt3c26xch852xy9ty3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ0NWM5NWQtZTE2ODZkMWMtZTg0ZmQwNTMtMzZkNmViMmY=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-07T17:55:35.618275Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzQ0NWM5NWQtZTE2ODZkMWMtZTg0ZmQwNTMtMzZkNmViMmY=, ActorId: [1:7592679846784095827:2332], ActorState: ExecuteState, LegacyTraceId: 01kecsk2gt3c26xch852xy9ty3, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)\n" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-07T17:55:35.670484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2026-01-07T17:55:35.989650Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-07T17:55:35.991989Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-07T17:55:35.992189Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7592679846784096073:2367] TxId: 281474976710668. Ctx: { TraceId: 01kecsk36k56t5ymwfnj3k0kch, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzZlYjhiYjEtNzZkMTVkNWQtNjZiZjc3OTUtY2I4NjU5YzE=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-07T17:55:35.992488Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzZlYjhiYjEtNzZkMTVkNWQtNjZiZjc3OTUtY2I4NjU5YzE=, ActorId: [1:7592679846784096035:2367], ActorState: ExecuteState, LegacyTraceId: 01kecsk36k56t5ymwfnj3k0kch, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)\n" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): require ... ER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:00:09.131548Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592681023415706157:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:09.131722Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:09.132389Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592681023415706179:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:09.132892Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7592681023415706178:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:09.132983Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:09.139495Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:00:09.155038Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7592681023415706182:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:00:09.256128Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7592681023415706237:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65018, MsgBus: 10757 2026-01-07T18:00:10.942778Z node 14 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7592681025305221329:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:00:10.942906Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:00:10.985677Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:00:11.153031Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:00:11.155784Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:00:11.155917Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:00:11.160728Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:00:11.183856Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7592681025305221295:2081] 1767808810930545 != 1767808810930548 2026-01-07T18:00:11.220836Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:00:11.331147Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:00:11.331171Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:00:11.331188Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:00:11.331313Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:00:11.385119Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:00:11.949355Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:00:12.395301Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-07T18:00:15.943480Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7592681025305221329:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:00:15.943607Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:00:17.247239Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592681055369993266:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:17.247380Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:17.247482Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592681055369993278:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:17.247756Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592681055369993280:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:17.247837Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:00:17.254742Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:00:17.275844Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592681055369993281:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:00:17.371664Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7592681055369993333:2543] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:00:17.486490Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:17.553271Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:22.378721Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2026-01-07T18:00:22.495943Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:22.860848Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [14:7592681076844830344:2444], TxId: 281474976715671, task: 1. Ctx: { CheckpointId : . TraceId : 01kecsvv930zhb5vmwpm55a32x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=14&id=OTY3NDQ4MGMtMTVjOWI1NTMtNjIxZTMzMmYtYzA1N2Y1YTY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2026-01-07T18:00:22.861337Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [14:7592681076844830345:2445], TxId: 281474976715671, task: 2. Ctx: { CheckpointId : . TraceId : 01kecsvv930zhb5vmwpm55a32x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=14&id=OTY3NDQ4MGMtMTVjOWI1NTMtNjIxZTMzMmYtYzA1N2Y1YTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [14:7592681076844830341:2441], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-07T18:00:22.862051Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=OTY3NDQ4MGMtMTVjOWI1NTMtNjIxZTMzMmYtYzA1N2Y1YTY=, ActorId: [14:7592681076844830334:2441], ActorState: ExecuteState, LegacyTraceId: 01kecsvv930zhb5vmwpm55a32x, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: \"pg_proc\"\n\n" severity: 1 } trace_id# |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> 
test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] >> 
alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> alter_compression.py::TestAlterCompression::test_availability_data |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpPg::TableDeleteWhere-useSink [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 30302, MsgBus: 6064 2026-01-07T17:55:27.318666Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679811626301895:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:55:27.318757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:55:27.571918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:55:27.572052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:55:27.592323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:55:27.611469Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679811626301788:2081] 1767808527314376 != 1767808527314379 2026-01-07T17:55:27.614489Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:55:27.625191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:55:27.833038Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:55:27.880859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:55:27.880881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:55:27.880899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:55:27.880994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:55:28.327296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:55:28.608508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:55:30.088873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-07T17:55:30.390836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-07T17:55:30.468583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-07T17:55:30.658261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-07T17:55:30.758579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-07T17:55:30.931324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"abcd ","abcd "} 2026-01-07T17:55:31.073342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-07T17:55:31.161495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-07T17:55:31.239025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-07T17:55:31.388430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-07T17:55:31.485561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-07T17:55:31.601934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-07T17:55:31.716090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-07T17:55:31.797423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-07T17:55:31.877995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 1111 2026-01-07T17:55:32.065810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {1111,1111} 2026-01-07T17:55:32.159188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2026-01-07T17:55:32.217196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2026-01-07T17:55:32.281295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propo ... WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710841:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.276143Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 869 2026-01-07T18:00:48.386815Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710843:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.455298Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:48.487037Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.559007Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 650 2026-01-07T18:00:48.586731Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.670376Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:48.699494Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710849:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.778977Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 829 2026-01-07T18:00:48.810576Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:48.895518Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:48.927803Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710853:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.025139Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 774 2026-01-07T18:00:49.082127Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710856:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.224112Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710857:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2950 2026-01-07T18:00:49.355860Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710858:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.485731Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710859:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.574579Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 114 2026-01-07T18:00:49.608824Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710861:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.681865Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:49.713916Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710863:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.784078Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3802 2026-01-07T18:00:49.814981Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710865:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.883964Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:49.910104Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:49.979478Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 4072 2026-01-07T18:00:50.009581Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.093501Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:50.126175Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710871:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 142 2026-01-07T18:00:50.210751Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:50.240536Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710873:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.324452Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:50.357015Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710875:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.431033Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3615 2026-01-07T18:00:50.459581Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710877:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.598533Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710878:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.670472Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3614 2026-01-07T18:00:50.706785Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710880:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.779389Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:50.816588Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710882:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:50.892726Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 22 2026-01-07T18:00:50.923958Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710884:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:51.074175Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-07T18:00:51.108745Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710886:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:00:51.205655Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |97.0%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> LocalTableWriter::StringEscaping >> LocalTableWriter::WaitTxIds |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn-useSink >> ColumnStatistics::CountMinSketchStatistics [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> LocalTableWriter::StringEscaping [GOOD] >> LocalTableWriter::WaitTxIds [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:44.275680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:44.343704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:44.350871Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:44.351956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:44.352101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:44.720438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:44.809873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:44.809961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:44.845952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:44.974681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:45.908299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:45.909308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:45.909352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:45.909383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:45.909568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:45.973295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:46.486195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:48.800905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:48.805746Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:48.808275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:48.832510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:48.832637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:48.870262Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:48.871623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.041227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:49.041323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:49.042553Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.043109Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.043530Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044253Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044500Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044577Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044675Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.044815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:49.059139Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:49.223133Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:49.244629Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:49.244711Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:49.271905Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:49.272898Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:49.273123Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:49.273186Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:49.273234Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:49.273275Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:49.273310Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:49.273347Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:49.273847Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:49.275649Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2454] 2026-01-07T17:51:49.277530Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:49.314915Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Describe result: PathErrorUnknown 2026-01-07T17:51:49.314963Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Creating table 2026-01-07T17:51:49.315029Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:49.320362Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.320435Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2141:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:49.342015Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2209:2650] 2026-01-07T17:51:49.342319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2209:2650], schemeshard id = 72075186224037897 2026-01-07T17:51:49.344340Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2215:2651], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:49.370723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:49.382123Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:49.382251Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:49.391845Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:49.540695Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:49.558381Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:49.837179Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:49.966355Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:49.966450Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2108:2578] Owner: [2:2107:2577]. Column diff is empty, finishing 2026-01-07T17:51:50.664260Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ode 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:52:15.419995Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4617:4103], StatRequests.size() = 1 2026-01-07T17:52:15.420105Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:55:59.990838Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecscyqzfa7m5t9b2ftga2wa", SessionId: ydb://session/3?node_id=2&id=YmRkY2Y0ZWEtMWE4YTU0Y2QtNjc1MmEwMWMtYTQyYTgwZDA=, Slow query, duration: 225.378885s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T17:55:59.992608Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4482:4028], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:55:59.993033Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4482:4028], Start read next stream part 2026-01-07T17:55:59.993836Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-07T17:55:59.993994Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:55:59.997733Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-07T17:55:59.997872Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], Start read next stream part 2026-01-07T17:56:00.001160Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4482:4028], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:56:00.001241Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4482:4028], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDA2MGJjNjUtNzI0NjMxYWYtNjExMTMyZDUtOWNiZTJkNDA=, TxId: 2026-01-07T17:56:00.274895Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4781:4251]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:56:00.275289Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:56:00.275353Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4781:4251], StatRequests.size() = 1 2026-01-07T18:01:07.762528Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsktvxbyk7epkjnnazm6wy", SessionId: ydb://session/3?node_id=2&id=M2Q1Yjk1MzQtY2M5NzRlOTgtYjlhZjAzZjAtODVkMzE1NjY=, Slow query, duration: 307.759635s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { 
return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T18:01:07.765200Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T18:01:07.765420Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], Start read next stream part 2026-01-07T18:01:07.766253Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32200, txId: 18446744073709551615] shutting down 2026-01-07T18:01:07.766857Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4893:4354], ActorId: [2:4897:4356], Starting query actor #1 [2:4898:4357] 2026-01-07T18:01:07.766936Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4897:4356], ActorId: [2:4898:4357], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T18:01:07.770441Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T18:01:07.770518Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4472:2463], ActorId: [2:4755:4229], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmY2NTg4ZTItOTE3ZGE2NDUtMmI3NTc2NDItOGJhZWNiYWM=, TxId: 2026-01-07T18:01:07.771600Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4897:4356], ActorId: [2:4898:4357], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmJjNDExOWItMjhkN2UyMTctODE0ZDRhM2MtMjA3MWZkNmI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T18:01:07.842072Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4911:4369]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T18:01:07.842496Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T18:01:07.842553Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4911:4369], StatRequests.size() = 1 2026-01-07T18:01:08.078196Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4897:4356], ActorId: [2:4898:4357], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=ZmJjNDExOWItMjhkN2UyMTctODE0ZDRhM2MtMjA3MWZkNmI=, TxId: 2026-01-07T18:01:08.078290Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4897:4356], ActorId: [2:4898:4357], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmJjNDExOWItMjhkN2UyMTctODE0ZDRhM2MtMjA3MWZkNmI=, TxId: 2026-01-07T18:01:08.078882Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4893:4354], ActorId: [2:4897:4356], Got response [2:4898:4357] SUCCESS 2026-01-07T18:01:08.079318Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T18:01:08.128962Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T18:01:08.129070Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3408:3460] 2026-01-07T18:01:08.129804Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4929:4379]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-07T18:01:08.130170Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T18:01:08.130436Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T18:01:08.130814Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T18:01:08.130878Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T18:01:08.130931Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-07T18:01:08.131201Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-07T18:01:08.138049Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-07T18:01:08.139748Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-07T18:01:08.139945Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] Test command err: 2026-01-07T18:01:08.139074Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592681273908453796:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:08.139151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:01:08.419282Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:01:08.433921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:01:08.434058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:01:08.468643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:01:08.533600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:01:08.612838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:01:09.029751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:01:09.029785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:01:09.029792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:01:09.029866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:01:09.156699Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:01:09.730049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:01:09.736769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:01:09.745524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:09.929576Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handshake: worker# [1:7592681278203421897:2492] 2026-01-07T18:01:09.930031Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T18:01:09.930376Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T18:01:09.930428Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Send handshake: worker# [1:7592681278203421897:2492] 2026-01-07T18:01:09.930807Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T18:01:09.930994Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-07T18:01:09.931215Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681278203421991:2553] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T18:01:09.931285Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T18:01:09.931406Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681278203421991:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T18:01:09.933750Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681278203421991:2553] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T18:01:09.933816Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681278203421988:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T18:01:09.933857Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
38][1:7592681278203421988:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2026-01-07T18:01:08.141966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592681275223524454:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:08.142028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:01:08.448894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:01:08.448992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:01:08.490844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:01:08.501298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:01:08.534460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:01:08.761028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:01:09.027659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:01:09.027737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:01:09.027759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:01:09.027865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:01:09.149128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:01:09.730741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:01:09.738612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:01:09.745542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:09.907345Z 
node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handshake: worker# [1:7592681279518492556:2553] 2026-01-07T18:01:09.907706Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-07T18:01:09.908003Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-07T18:01:09.908031Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Send handshake: worker# [1:7592681279518492556:2553] 2026-01-07T18:01:09.909555Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-07T18:01:09.916158Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-07T18:01:09.916289Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-07T18:01:09.918532Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681279518492559:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-07T18:01:09.918588Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T18:01:09.918684Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681279518492559:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-07T18:01:09.922328Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681279518492559:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T18:01:09.922404Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T18:01:09.922461Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-07T18:01:10.911869Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-07T18:01:10.912021Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2026-01-07T18:01:10.912130Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681279518492559:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-07T18:01:10.914061Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7592681279518492559:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-07T18:01:10.914143Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-07T18:01:10.914190Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7592681279518492555:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> ColumnStatistics::SimpleColumnStatistics [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:45.995386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:46.092582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:46.099843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:46.100186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:46.100296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:46.449701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:46.531999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:46.532092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:46.566587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:46.648256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:47.249084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:47.250063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:47.250115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:47.250144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:47.250602Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:47.314779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:47.872762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:50.193915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:50.199650Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-07T17:51:50.202163Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:50.223694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:50.223776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.261576Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T17:51:50.262975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.418665Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:50.418754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.419756Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.420351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.420857Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.421535Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.421933Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.422046Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.422223Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.422380Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.422474Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.436569Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.587772Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:50.625196Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:50.625269Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:50.652252Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:50.653229Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:50.653403Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:50.653446Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:50.653497Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:50.653548Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:50.653590Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:50.653631Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:50.654188Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:50.698733Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.698843Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.727703Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2195:2634] 2026-01-07T17:51:50.728510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2195:2634], schemeshard id = 72075186224037897 2026-01-07T17:51:50.780143Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2246:2660] 2026-01-07T17:51:50.781920Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-07T17:51:50.790917Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Describe result: PathErrorUnknown 2026-01-07T17:51:50.790968Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Creating table 2026-01-07T17:51:50.791047Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-07T17:51:50.799144Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:50.802482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:50.808213Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:50.808330Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:50.818218Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:50.984149Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-07T17:51:51.010394Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:51.307058Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:51.449698Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:51.449791Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2253:2666] Owner: [2:2252:2665]. Column diff is empty, finishing 2026-01-07T17:51:52.200046Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ode id = 2, client id = [2:4613:4107], server id = [2:4614:4108], tablet id = 72075186224037894, status = OK 2026-01-07T17:52:21.216325Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-07T17:52:21.216399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4611:4105], StatRequests.size() = 1 2026-01-07T17:52:21.216482Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T17:56:04.500099Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecsd4ga4g7q6b3cqzz9kd4t", SessionId: ydb://session/3?node_id=2&id=NTdiYzk4YWQtNWU2MGMxZTItNWUzMzY3MDAtNDcxZWI1MmU=, Slow query, duration: 223.994307s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T17:56:04.501831Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4485:4031], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T17:56:04.502229Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4485:4031], Start read next stream part 2026-01-07T17:56:04.502879Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-07T17:56:04.503057Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-07T17:56:04.505878Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4485:4031], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T17:56:04.505933Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4485:4031], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGQxNWExNmItYmM0OTUxOTctYmE3MzllNjMtMTVlY2IwOA==, TxId: 2026-01-07T17:56:04.506388Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-07T17:56:04.506475Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], Start read next stream part 2026-01-07T17:56:04.741304Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4777:4254]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T17:56:04.741619Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-07T17:56:04.741661Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4777:4254], StatRequests.size() = 1 2026-01-07T18:01:11.831320Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kecskz8t2zmtds7vrc0mpn4n", SessionId: ydb://session/3?node_id=2&id=ZTg1NzZmOWMtZmRlYzEyYmEtZjQxZTJmY2EtNjNhYTMwNWQ=, Slow query, duration: 307.320961s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { 
return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-07T18:01:11.834762Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T18:01:11.835080Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], Start read next stream part 2026-01-07T18:01:11.835264Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32210, txId: 18446744073709551615] shutting down 2026-01-07T18:01:11.837123Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4889:4357], ActorId: [2:4893:4359], Starting query actor #1 [2:4894:4360] 2026-01-07T18:01:11.837216Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4893:4359], ActorId: [2:4894:4360], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-07T18:01:11.841468Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T18:01:11.841577Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4475:2463], ActorId: [2:4751:4232], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjYzYzhkMDMtMTA0OWViOWQtZjc2ODQxNWMtYjc3OTBhNw==, TxId: 2026-01-07T18:01:11.843356Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4893:4359], ActorId: [2:4894:4360], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWFmYjZlM2MtYTEzOGYxM2UtZDU0NzEwOTktMWJhOTZkOTE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T18:01:11.927121Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4907:4372]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T18:01:11.927527Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T18:01:11.927581Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4907:4372], StatRequests.size() = 1 2026-01-07T18:01:12.120273Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4893:4359], ActorId: [2:4894:4360], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=NWFmYjZlM2MtYTEzOGYxM2UtZDU0NzEwOTktMWJhOTZkOTE=, TxId: 2026-01-07T18:01:12.120381Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4893:4359], ActorId: [2:4894:4360], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWFmYjZlM2MtYTEzOGYxM2UtZDU0NzEwOTktMWJhOTZkOTE=, TxId: 2026-01-07T18:01:12.120827Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4889:4357], ActorId: [2:4893:4359], Got response [2:4894:4360] SUCCESS 2026-01-07T18:01:12.121692Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T18:01:12.161211Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-07T18:01:12.161301Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3351:3460] 2026-01-07T18:01:12.162161Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4925:4382]], StatType[ 1 ], StatRequestsCount[ 2 ] 2026-01-07T18:01:12.162614Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T18:01:12.162702Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-07T18:01:12.163055Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T18:01:12.163123Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-07T18:01:12.163195Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 1 ] 2026-01-07T18:01:12.163524Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 2 ] 2026-01-07T18:01:12.170494Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-07T18:01:12.170725Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/statistics/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |97.0%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] >> test_canonical_records.py::test_replace_config |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpLimits::TooBigColumn-useSink [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14484, MsgBus: 10112 2026-01-07T17:54:25.015516Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592679539040287420:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:25.015604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T17:54:25.270188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:54:25.270299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:54:25.324514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T17:54:25.346310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:54:25.348720Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592679539040287292:2081] 1767808464994944 != 1767808464994947 2026-01-07T17:54:25.350583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:54:25.506156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:54:25.506181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:54:25.506192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:54:25.506288Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:54:25.611211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T17:54:25.967978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:54:25.973723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T17:54:26.023281Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:54:26.039304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.182621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.330587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:26.402076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.059547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679556220158353:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.059668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.060109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679556220158363:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.060190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.329932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.356436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.380958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.405396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.434496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.466958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.500162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.561854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:54:28.624686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679556220159234:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.624747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.624862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679556220159239:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.624870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592679556220159241:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.624896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T17:54:28.628371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T17:54:28.638214Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592679556220159243:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T17:54:28.720691Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592679556220159294:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T17:54:30.010875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592679539040287420:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-07T17:54:30.010931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T17:54:30.107614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... T18:01:09.039143Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:01:09.137696Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:01:09.137721Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:01:09.137729Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:01:09.137824Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:01:09.237175Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:01:09.842474Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:01:09.893067Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:01:09.932299Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:10.059915Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:10.256080Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:10.353461Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:13.838316Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7592681275698113103:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:13.838411Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:01:13.976678Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592681297172951439:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:13.976777Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:13.977220Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592681297172951448:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:13.977282Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.078314Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.121317Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.159834Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.205977Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.250958Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.331982Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.377307Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.436581Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:14.536154Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592681301467919623:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.536262Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.536585Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592681301467919629:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.536607Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7592681301467919628:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.536638Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:14.545587Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:01:14.556746Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7592681301467919632:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T18:01:14.630139Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7592681301467919683:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:01:18.307552Z node 5 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:186: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2026-01-07T18:01:18.307674Z node 5 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710673 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2026-01-07T18:01:18.307869Z node 5 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [5:7592681318647789184:2528] TxId: 281474976710673. Ctx: { TraceId: 01kecsxh73av1vc1hnmx0jpdye, Database: /Root, SessionId: ydb://session/3?node_id=5&id=OWE0MjEyNWItYmQxOTA0M2QtYjdmMzgwODktODA2NjFlYTg=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; trace_id# 2026-01-07T18:01:18.308444Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=OWE0MjEyNWItYmQxOTA0M2QtYjdmMzgwODktODA2NjFlYTg=, ActorId: [5:7592681314352821866:2528], ActorState: ExecuteState, LegacyTraceId: 01kecsxh73av1vc1hnmx0jpdye, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table >> test_canonical_records.py::test_execute_minikql |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test 
|97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test 
|97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics 2026-01-07 18:01:42,384 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 18:01:42,607 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 458838 58.6M 58.2M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/ec9d/00179a/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args 459114 2.0G 2.0G 1.5G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/ec9d/00179a/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T17:51:45.815263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:45.883533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:45.888635Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:609:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T17:51:45.888864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T17:51:45.889055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T17:51:46.187782Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:46.266704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:46.266807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:46.324236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:46.426196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T17:51:47.123532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T17:51:47.123939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T17:51:47.123969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T17:51:47.123992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T17:51:47.124301Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T17:51:47.197155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T17:51:47.749448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-07T17:51:50.170540Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T17:51:50.170597Z node 4 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 4 2026-01-07T17:51:50.178934Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T17:51:50.200555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-07T17:51:50.200636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.237545Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-07T17:51:50.239472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.384469Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T17:51:50.384566Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T17:51:50.385813Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.386323Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.386952Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.387723Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.387888Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.388072Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.388209Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.388350Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.388516Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-07T17:51:50.403517Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T17:51:50.554908Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T17:51:50.619276Z node 4 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-07T17:51:50.619363Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-07T17:51:50.646934Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-07T17:51:50.647205Z node 4 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-07T17:51:50.647394Z node 4 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-07T17:51:50.647445Z node 4 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-07T17:51:50.647488Z node 4 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-07T17:51:50.647574Z node 4 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-07T17:51:50.647624Z node 4 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-07T17:51:50.647663Z node 4 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-07T17:51:50.648022Z node 4 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-07T17:51:50.652600Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.652697Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [4:2286:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-07T17:51:50.677625Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2332:2621] 2026-01-07T17:51:50.678567Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2332:2621], schemeshard id = 72075186224037897 2026-01-07T17:51:50.742026Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2393:2655] 2026-01-07T17:51:50.745838Z node 4 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-07T17:51:50.763861Z node 4 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Describe result: PathErrorUnknown 2026-01-07T17:51:50.763938Z node 4 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Creating table 2026-01-07T17:51:50.764025Z node 4 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-07T17:51:50.771213Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [4:2478:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T17:51:50.774198Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T17:51:50.779491Z node 4 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-07T17:51:50.779584Z node 4 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Subscribe on create table tx: 281474976720657 2026-01-07T17:51:50.788674Z node 4 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. 
Subscribe on tx: 281474976720657 registered 2026-01-07T17:51:50.967046Z node 4 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-07T17:51:51.097662Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-07T17:51:51.283263Z node 4 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-07T17:51:51.413359Z node 4 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-07T17:51:51.413436Z node 4 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [4:2413:2667] Owner: [4:2411:2666]. Column diff is empty, finishing 2026-01-07T17:51:52.022455Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ActorId: [4:8041:5643], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=YzYzNDM3N2QtMmVkNTYzNTgtNzVjNzlmNmMtZDU5MjdjZDA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-07T18:01:23.348086Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [4:8057:5657]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T18:01:23.348645Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-07T18:01:23.348842Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2026-01-07T18:01:23.348896Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-07T18:01:23.349127Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-07T18:01:23.349205Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [4:8057:5657], StatRequests.size() = 1 2026-01-07T18:01:23.349289Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-07T18:01:23.591145Z node 4 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [4:8040:5642], ActorId: [4:8041:5643], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YzYzNDM3N2QtMmVkNTYzNTgtNzVjNzlmNmMtZDU5MjdjZDA=, TxId: 2026-01-07T18:01:23.591240Z node 4 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [4:8040:5642], ActorId: [4:8041:5643], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YzYzNDM3N2QtMmVkNTYzNTgtNzVjNzlmNmMtZDU5MjdjZDA=, TxId: 2026-01-07T18:01:23.591772Z node 4 
:STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [4:8039:5641], ActorId: [4:8040:5642], Got response [4:8041:5643] SUCCESS 2026-01-07T18:01:23.592378Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-07T18:01:23.643002Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-07T18:01:23.643069Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=opId1, ActorId=[1:6323:4003] 2026-01-07T18:01:23.644963Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:8078:5668] 2026-01-07T18:01:23.646324Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:8076:4523] , Record { OperationId: "opId1" Tables { PathId { OwnerId: 72075186224037905 LocalId: 32 } } Types: TYPE_COUNT_MIN_SKETCH Database: "/Root/Serverless2" } 2026-01-07T18:01:23.646391Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `opId1', DatabaseName: `/Root/Serverless2', Types: 1 2026-01-07T18:01:23.646430Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `opId1', PathId: [OwnerId: 72075186224037905, LocalPathId: 32], ColumnTags: 2026-01-07T18:01:23.686358Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-07T18:01:23.686702Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T18:01:23.686768Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-07T18:01:23.686817Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-07T18:01:23.686861Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-07T18:01:23.734898Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-07T18:01:23.734988Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-07T18:01:23.815503Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7768:5396], schemeshard count = 1 2026-01-07T18:01:23.827299Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7768:5396], schemeshard count = 1 2026-01-07T18:01:24.472281Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T18:01:24.472378Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-07T18:01:24.472421Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=opId1 2026-01-07T18:01:24.472453Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-07T18:01:25.036192Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T18:01:25.036295Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-07T18:01:25.036336Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-07T18:01:25.036368Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-07T18:01:25.072870Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T18:01:25.072993Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-07T18:01:25.073034Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-07T18:01:25.073068Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-07T18:01:25.682165Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037905 2026-01-07T18:01:25.682275Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.506000s, at schemeshard: 72075186224037905 2026-01-07T18:01:25.682586Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-07T18:01:25.709867Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-07T18:01:25.986618Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-07T18:01:25.994981Z node 4 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [4:8163:2463], ActorId: [4:8173:5715], Bootstrap. 
Database: /Root/Serverless2, IsSystemUser: 0, run create session 2026-01-07T18:01:26.000338Z node 4 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [4:8163:2463], ActorId: [4:8173:5715], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Serverless2/Table2` 2026-01-07T18:01:26.000493Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:8163:2463], ActorId: [4:8173:5715], Start read next stream part 2026-01-07T18:01:26.123003Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [4:8199:5728]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-07T18:01:26.123709Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-07T18:01:26.123935Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2026-01-07T18:01:26.124006Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-07T18:01:26.124292Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-07T18:01:26.124398Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [4:8199:5728], StatRequests.size() = 1 2026-01-07T18:01:26.124523Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/00179a/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ec9d/00179a/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] |97.3%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32132, MsgBus: 2029 2026-01-07T18:01:20.724404Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592681326110223889:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:20.725965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:01:20.946900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:01:20.981295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:01:20.981383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:01:21.026676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:01:21.047177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:01:21.108149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:01:21.108190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:01:21.108198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:01:21.108327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:01:21.218999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:01:21.512535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:01:21.551396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:21.660708Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:21.758847Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:01:21.795016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:21.858937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:23.895805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681338995127598:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:23.895926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:23.896258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681338995127608:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:23.896373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.177512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.207220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.235741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.264174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.298066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.336012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.394040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.435036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:24.507869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681343290095774:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.507923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.508014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681343290095779:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.508170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681343290095780:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.508227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:24.511617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:01:24.524164Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592681343290095783:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T18:01:24.621971Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592681343290095834:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:01:25.717666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592681326110223889:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:25.717749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:01:26.227267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:26.287262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... elf_id=[3:7592681406923565287:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:39.579022Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:01:39.608952Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:01:39.688424Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:01:39.707279Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:01:39.707364Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:01:39.718820Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:01:39.897036Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:01:39.924583Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:01:39.924605Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:01:39.924614Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:01:39.924708Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:01:40.446868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:01:40.462614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:40.525045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:40.624480Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:01:40.680320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:40.760142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.287732Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592681424103436237:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.287851Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.288152Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592681424103436247:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.288212Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.354133Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.396731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.467435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.499078Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.564432Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.599428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.637515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.687983Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:43.778314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592681424103437122:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.778403Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.778813Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592681424103437128:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.778857Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592681424103437127:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.778957Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:01:43.782977Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:01:43.796709Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7592681424103437131:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-07T18:01:43.868354Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7592681424103437182:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:01:44.581520Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7592681406923565287:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:01:44.581606Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:01:45.834132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:01:45.929858Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |97.3%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] |97.3%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002b54/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit_log.mzku6wi0.txt 2026-01-07T18:01:37.359672Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002b0a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.bvlh88xa.txt 2026-01-07T18:01:44.809588Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] |97.5%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_canonical_records.py::test_dml |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002ae1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.fh85xgp8.txt 2026-01-07T18:01:51.887777Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] |97.6%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[insert] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 22662, MsgBus: 21687 2026-01-07T18:02:05.523267Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592681521662085504:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:02:05.523329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:02:05.758823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:02:05.759073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:02:05.759076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:02:05.853006Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592681521662085479:2081] 1767808925522017 != 1767808925522020 2026-01-07T18:02:05.858803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:02:05.866560Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:02:05.980556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:02:05.980590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:02:05.980598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:02:05.980727Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:02:05.998332Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:02:06.425562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:02:06.476812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:06.546639Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:02:06.584130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:06.743664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:06.824307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:08.714897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681534546989242:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:08.715024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:08.715667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681534546989252:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:08.715720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.025951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.070416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.115356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.149624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.188268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.227257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.267104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.317987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:02:09.414691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681538841957423:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.414766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.414776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681538841957428:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.415082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592681538841957430:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.415145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:02:09.418415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:02:09.432699Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592681538841957431:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T18:02:09.534377Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592681538841957483:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:02:10.523296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592681521662085504:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:02:10.523384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:02:11.090729Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592681547431892379:2537], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-07T18:02:11.091212Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjJiNzc2NGEtNDFhNGQxZmQtNjE4Nzk4OGUtMzk0NTBjMDI=, ActorId: [1:7592681543136925074:2531], ActorState: ExecuteState, LegacyTraceId: 01kecsz560b4q2my0q7wgffcwh, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] |97.6%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] |97.7%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |97.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_auditlog.py::test_create_and_remove_tenant [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] >> test_canonical_records.py::test_dstool_add_group_http >> test_canonical_records.py::test_replace_config [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_canonical_records.py::test_execute_minikql [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002a2d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit_log._em_xzxb.txt 2026-01-07T18:02:06.146862Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-07T18:02:06.154432Z: {"paths":"[/Root/users/database]","tx_id":"281474976710660","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2026-01-07T18:02:06.187219Z: {"paths":"[/Root/users/database]","tx_id":"281474976710661","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2026-01-07T18:02:10.666827Z: {"paths":"[/Root/users/database/.sys]","tx_id":"281474976725657","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 2026-01-07T18:02:10.667319Z: {"paths":"[/Root/users/database/.sys/auth_owners]","tx_id":"281474976725658","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.667569Z: 
{"paths":"[/Root/users/database/.sys/query_sessions]","tx_id":"281474976725659","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.667812Z: {"paths":"[/Root/users/database/.sys/tables]","tx_id":"281474976725660","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.668286Z: {"paths":"[/Root/users/database/.sys/pg_class]","tx_id":"281474976725661","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.668605Z: {"paths":"[/Root/users/database/.sys/partition_stats]","tx_id":"281474976725662","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.669048Z: {"paths":"[/Root/users/database/.sys/query_metrics_one_minute]","tx_id":"281474976725663","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.669338Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_hour]","tx_id":"281474976725664","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.669571Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_hour]","tx_id":"281474976725665","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.669850Z: {"paths":"[/Root/users/database/.sys/compile_cache_queries]","tx_id":"281474976725666","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.670304Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_minute]","tx_id":"281474976725667","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.670558Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_minute]","tx_id":"281474976725668","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.670743Z: 
{"paths":"[/Root/users/database/.sys/resource_pool_classifiers]","tx_id":"281474976725669","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.670985Z: {"paths":"[/Root/users/database/.sys/nodes]","tx_id":"281474976725670","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.671177Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_hour]","tx_id":"281474976725671","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.671350Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_minute]","tx_id":"281474976725672","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.671606Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_minute]","tx_id":"281474976725673","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.671763Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_hour]","tx_id":"281474976725674","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.671997Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_hour]","tx_id":"281474976725675","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.672940Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_hour]","tx_id":"281474976725676","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.673156Z: {"paths":"[/Root/users/database/.sys/pg_tables]","tx_id":"281474976725677","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.673333Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_minute]","tx_id":"281474976725678","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.673595Z: 
{"paths":"[/Root/users/database/.sys/auth_group_members]","tx_id":"281474976725679","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.673805Z: {"paths":"[/Root/users/database/.sys/auth_users]","tx_id":"281474976725680","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.674012Z: {"paths":"[/Root/users/database/.sys/resource_pools]","tx_id":"281474976725681","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.674197Z: {"paths":"[/Root/users/database/.sys/streaming_queries]","tx_id":"281474976725682","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.674521Z: {"paths":"[/Root/users/database/.sys/auth_permissions]","tx_id":"281474976725683","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.674747Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_minute]","tx_id":"281474976725684","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.674982Z: {"paths":"[/Root/users/database/.sys/auth_effective_permissions]","tx_id":"281474976725685","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.675174Z: {"paths":"[/Root/users/database/.sys/auth_groups]","tx_id":"281474976725686","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-07T18:02:10.679509Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-07T18:02:11.568071Z: {"paths":"[.metadata/workload_manager/pools/default]","tx_id":"281474976720657","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2026-01-07T18:02:11.698994Z: {"reason":"Check failed: path: '/Root/users/database/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, 
LocalPathId: 35], type: EPathTypeResourcePool, state: EPathStateNoChanges)","paths":"[default]","tx_id":"281474976720658","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2026-01-07T18:02:14.515094Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2026-01-07T18:02:14.520884Z: {"paths":"[/Root/users/database]","tx_id":"281474976710662","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2026-01-07T18:02:14.548696Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002a03/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit_log.b3lkmwic.txt 2026-01-07T18:02:20.249626Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] |97.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0029fa/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log.wj285bvm.txt 2026-01-07T18:02:18.272188Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002a07/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit_log.0i6h2cee.txt 2026-01-07T18:02:19.472311Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:02:19.472267Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from 
`/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-07T18:02:19.317171Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0029c1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.h91i_2y4.txt 2026-01-07T18:02:25.344958Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:02:25.344888Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-07T18:02:25.270358Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0029a7/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit_log.zvbtm8qk.txt 2026-01-07T18:02:27.393687Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: 
\n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_canonical_records.py::test_dstool_evict_vdisk_grpc >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/00290b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit_log.09pjv7ld.txt 2026-01-07T18:02:44.962576Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] |97.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_restarts.py::test_basic [FAIL] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0028d3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk22/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit_log.3j41b52l.txt 2026-01-07T18:03:10.729778Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:03:10.729734Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2026-01-07T18:03:10.657803Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_canonical_records.py::test_topic [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0028c6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit_log.1qvy8221.txt 2026-01-07T18:03:13.099387Z: {"commit_tx":"1","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","cloud_id":"cloud-id-A","begin_tx":"1","resource_id":"database-id-C","end_time":"2026-01-07T18:03:13.099338Z","tx_id":"{none}","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T18:03:12.915784Z","database":"/Root/test_auditlog.py","subject":"root@builtin","status":"SUCCESS","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","component":"grpc-proxy","sanitized_token":"**** (B6C6F477)","detailed_status":"SUCCESS","remote_address":"127.0.0.1"} |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test_auditlog.py::test_single_dml_query_logged[replace] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic [FAIL] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.1%| [TA] $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TA] $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] |98.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002852/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.w3ljb1_t.txt 2026-01-07T18:03:33.785661Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:03:33.785619Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-07T18:03:33.660053Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 >> YdbSdkSessionsPool1Session::FailTest/0 >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> YdbSdkSessionsPool::WaitQueue/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/002826/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.ljvk782x.txt 2026-01-07T18:03:39.885626Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> YdbSdkSessionsPool::WaitQueue/1 >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/1 >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/0 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} 
ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0027d8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit_log.46o6b465.txt 2026-01-07T18:03:47.426942Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:03:47.426894Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-07T18:03:47.358084Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/0027e0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit_log.4psgh9jd.txt 2026-01-07T18:03:48.312135Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:03:48.312089Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T18:03:48.166388Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::StressTestSync/0 
[GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/00279d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.jf4zb5a6.txt 2026-01-07T18:03:58.071231Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-07T18:03:58.071184Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-07T18:03:57.801357Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_canonical_records.py::test_dml_through_http [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ec9d/00297e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit_log.ks2pxlwq.txt |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] >> test_canonical_records.py::test_restart_pdisk [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] >> 
test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> 
test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |98.4%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_solomon >> listing_batching.py::TestListingBatching::test_listing_batching_solomon |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.4%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_solomon [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/tests/olap/scenario/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 1709 30% 1709 50% 1709 90% 1709 99% 1709 ms Write: 10% 6657 30% 6657 50% 6657 90% 6657 99% 6657 ms Write: 10% 11940 30% 11940 50% 11940 90% 11940 99% 11940 ms Write: 10% 13307 30% 13307 50% 13307 90% 13307 99% 13307 ms Write: 10% 14051 30% 14051 50% 14051 90% 14051 99% 14051 ms Write: 10% 12949 30% 12949 50% 12949 90% 12949 99% 12949 ms Write: 10% 13304 30% 13304 50% 13304 90% 13304 99% 13304 ms Write: 10% 13569 30% 13569 50% 13569 90% 13569 99% 13569 ms Write: 10% 13093 30% 13093 50% 13093 90% 13093 99% 13093 ms Write: 10% 10097 30% 10097 50% 10097 90% 10097 99% 10097 ms Write: 10% 6291 30% 6291 50% 6291 90% 6291 99% 6291 ms Write: 10% 13734 30% 13734 50% 13734 90% 13734 99% 13734 ms Write: 10% 12808 30% 12808 50% 12808 90% 12808 99% 12808 ms Write: 10% 10145 30% 10145 50% 10145 90% 10145 99% 10145 ms Write: 10% 12272 30% 12272 50% 12272 90% 12272 99% 12272 ms Write: 10% 11933 30% 11933 50% 11933 90% 11933 99% 11933 ms Write: 10% 9833 30% 9833 50% 9833 90% 9833 99% 9833 ms Write: 10% 12006 30% 12006 50% 12006 90% 12006 99% 12006 ms Write: 10% 11761 30% 11761 50% 11761 90% 11761 99% 11761 ms Write: 10% 11091 30% 11091 50% 11091 90% 11091 99% 11091 ms Write: 10% 11384 30% 11384 50% 11384 90% 11384 99% 11384 ms Write: 10% 10999 30% 10999 50% 10999 90% 10999 99% 10999 ms Write: 10% 11052 30% 11052 50% 11052 90% 11052 99% 11052 ms Write: 10% 11026 30% 11026 50% 11026 90% 11026 99% 11026 ms Write: 10% 10682 30% 10682 50% 10682 90% 10682 99% 10682 ms Write: 10% 10501 30% 10501 50% 10501 90% 10501 99% 10501 ms Write: 10% 6104 30% 6104 50% 6104 90% 6104 99% 6104 ms Write: 10% 10277 30% 10277 50% 10277 90% 10277 99% 10277 ms Write: 10% 10080 30% 10080 50% 10080 90% 10080 99% 10080 ms Write: 10% 3140 30% 3140 50% 3140 90% 3140 99% 3140 ms Write: 10% 9778 30% 9778 50% 9778 90% 9778 99% 9778 ms Write: 10% 8625 30% 8625 50% 8625 90% 8625 99% 8625 ms Write: 10% 8768 30% 8768 50% 8768 90% 8768 99% 8768 ms Write: 10% 7139 30% 7139 50% 7139 90% 7139 99% 7139 ms Write: 10% 6581 30% 6581 50% 6581 90% 6581 99% 6581 ms Write: 10% 6479 30% 6479 50% 6479 90% 6479 99% 6479 ms Write: 10% 6243 30% 6243 50% 6243 90% 6243 99% 6243 ms Write: 10% 7269 30% 7269 50% 7269 90% 7269 99% 7269 ms Write: 10% 5959 30% 5959 50% 5959 90% 5959 99% 5959 ms Write: 10% 5028 30% 5028 50% 5028 90% 5028 99% 5028 ms Write: 10% 3661 30% 3661 50% 3661 90% 3661 99% 3661 ms Write: 10% 4002 30% 4002 50% 4002 90% 4002 99% 4002 ms Write: 10% 3076 30% 3076 50% 3076 90% 3076 99% 3076 ms Write: 10% 4119 30% 4119 50% 4119 90% 4119 99% 4119 ms Write: 10% 2706 30% 2706 50% 2706 90% 2706 99% 2706 ms Write: 10% 2989 30% 2989 50% 2989 90% 2989 99% 2989 ms Write: 10% 2646 30% 2646 50% 2646 90% 2646 99% 2646 ms Write: 10% 3045 30% 3045 50% 3045 90% 3045 99% 3045 ms Write: 10% 2502 30% 2502 50% 2502 90% 2502 99% 2502 ms Write: 10% 2332 30% 2332 50% 2332 90% 2332 99% 2332 ms Write: 10% 2525 30% 2525 50% 2525 90% 2525 99% 2525 ms Write: 10% 2782 30% 2782 50% 2782 90% 2782 99% 2782 ms Write: 10% 2265 30% 2265 50% 2265 90% 2265 99% 2265 ms Write: 10% 2170 30% 2170 50% 2170 90% 2170 99% 2170 ms Write: 10% 2121 30% 2121 50% 2121 90% 2121 99% 2121 ms Write: 10% 2027 30% 2027 50% 2027 90% 2027 99% 2027 ms Write: 10% 2041 30% 2041 50% 2041 90% 2041 99% 2041 ms Write: 10% 3466 30% 3466 50% 3466 90% 3466 99% 3466 ms Write: 10% 3431 30% 3431 50% 3431 
90% 3431 99% 3431 ms Write: 10% 3143 30% 3143 50% 3143 90% 3143 99% 3143 ms Write: 10% 3507 30% 3507 50% 3507 90% 3507 99% 3507 ms Write: 10% 2943 30% 2943 50% 2943 90% 2943 99% 2943 ms Write: 10% 2382 30% 2382 50% 2382 90% 2382 99% 2382 ms Write: 10% 2242 30% 2242 50% 2242 90% 2242 99% 2242 ms Step 2. read write Write: 10% 5501 30% 5501 50% 5501 90% 5501 99% 5501 ms Write: 10% 6820 30% 6820 50% 6820 90% 6820 99% 6820 ms Write: 10% 6774 30% 6774 50% 6774 90% 6774 99% 6774 ms Write: 10% 10927 30% 10927 50% 10927 90% 10927 99% 10927 ms Write: 10% 13873 30% 13873 50% 13873 90% 13873 99% 13873 ms Write: 10% 13488 30% 13488 50% 13488 90% 13488 99% 13488 ms Write: 10% 12830 30% 12830 50% 12830 90% 12830 99% 12830 ms Write: 10% 12250 30% 12250 50% 12250 90% 12250 99% 12250 ms Write: 10% 11965 30% 11965 50% 11965 90% 11965 99% 11965 ms Write: 10% 11935 30% 11935 50% 11935 90% 11935 99% 11935 ms Write: 10% 11906 30% 11906 50% 11906 90% 11906 99% 11906 ms Write: 10% 11566 30% 11566 50% 11566 90% 11566 99% 11566 ms Write: 10% 6109 30% 6109 50% 6109 90% 6109 99% 6109 ms Write: 10% 12235 30% 12235 50% 12235 90% 12235 99% 12235 ms Write: 10% 11013 30% 11013 50% 11013 90% 11013 99% 11013 ms Write: 10% 10044 30% 10044 50% 10044 90% 10044 99% 10044 ms Write: 10% 10729 30% 10729 50% 10729 90% 10729 99% 10729 ms Write: 10% 2101 30% 2101 50% 2101 90% 2101 99% 2101 ms Write: 10% 9988 30% 9988 50% 9988 90% 9988 99% 9988 ms Write: 10% 4985 30% 4985 50% 4985 90% 4985 99% 4985 ms Write: 10% 7036 30% 7036 50% 7036 90% 7036 99% 7036 ms Write: 10% 9076 30% 9076 50% 9076 90% 9076 99% 9076 ms Write: 10% 9991 30% 9991 50% 9991 90% 9991 99% 9991 ms Write: 10% 9520 30% 9520 50% 9520 90% 9520 99% 9520 ms Write: 10% 9447 30% 9447 50% 9447 90% 9447 99% 9447 ms Write: 10% 7833 30% 7833 50% 7833 90% 7833 99% 7833 ms Write: 10% 5346 30% 5346 50% 5346 90% 5346 99% 5346 ms Write: 10% 8528 30% 8528 50% 8528 90% 8528 99% 8528 ms Write: 10% 8582 30% 8582 50% 8582 90% 8582 99% 8582 ms Write: 10% 9219 30% 9219 50% 9219 90% 9219 99% 9219 ms Write: 10% 7881 30% 7881 50% 7881 90% 7881 99% 7881 ms Write: 10% 5856 30% 5856 50% 5856 90% 5856 99% 5856 ms Write: 10% 7589 30% 7589 50% 7589 90% 7589 99% 7589 ms Write: 10% 8620 30% 8620 50% 8620 90% 8620 99% 8620 ms Write: 10% 7288 30% 7288 50% 7288 90% 7288 99% 7288 ms Write: 10% 3839 30% 3839 50% 3839 90% 3839 99% 3839 ms Write: 10% 6522 30% 6522 50% 6522 90% 6522 99% 6522 ms Write: 10% 6321 30% 6321 50% 6321 90% 6321 99% 6321 ms Write: 10% 3530 30% 3530 50% 3530 90% 3530 99% 3530 ms Write: 10% 6257 30% 6257 50% 6257 90% 6257 99% 6257 ms Write: 10% 5573 30% 5573 50% 5573 90% 5573 99% 5573 ms Write: 10% 5164 30% 5164 50% 5164 90% 5164 99% 5164 ms Write: 10% 3183 30% 3183 50% 3183 90% 3183 99% 3183 ms Write: 10% 7648 30% 7648 50% 7648 90% 7648 99% 7648 ms Write: 10% 5102 30% 5102 50% 5102 90% 5102 99% 5102 ms Write: 10% 7588 30% 7588 50% 7588 90% 7588 99% 7588 ms Write: 10% 5236 30% 5236 50% 5236 90% 5236 99% 5236 ms Write: 10% 5915 30% 5915 50% 5915 90% 5915 99% 5915 ms Write: 10% 3264 30% 3264 50% 3264 90% 3264 99% 3264 ms Write: 10% 5096 30% 5096 50% 5096 90% 5096 99% 5096 ms Write: 10% 4029 30% 4029 50% 4029 90% 4029 99% 4029 ms Write: 10% 8193 30% 8193 50% 8193 90% 8193 99% 8193 ms Write: 10% 3135 30% 3135 50% 3135 90% 3135 99% 3135 ms Write: 10% 5107 30% 5107 50% 5107 90% 5107 99% 5107 ms Write: 10% 2537 30% 2537 50% 2537 90% 2537 99% 2537 ms Write: 10% 2335 30% 2335 50% 2335 90% 2335 99% 2335 ms Write: 10% 2324 30% 2324 50% 2324 90% 2324 99% 2324 ms Write: 10% 2232 30% 2232 50% 2232 90% 
2232 99% 2232 ms Write: 10% 2004 30% 2004 50% 2004 90% 2004 99% 2004 ms Write: 10% 2893 30% 2893 50% 2893 90% 2893 99% 2893 ms Write: 10% 3672 30% 3672 50% 3672 90% 3672 99% 3672 ms Write: 10% 1862 30% 1862 50% 1862 90% 1862 99% 1862 ms Write: 10% 2659 30% 2659 50% 2659 90% 2659 99% 2659 ms Write: 10% 3759 30% 3759 50% 3759 90% 3759 99% 3759 ms Read: 10% 4890 30% 7916 50% 10943 90% 16996 99% 18358 ms Step 3. write modify Write: 10% 2036 30% 2036 50% 2036 90% 2036 99% 2036 ms Write: 10% 1684 30% 1684 50% 1684 90% 1684 99% 1684 ms Write: 10% 2127 30% 2127 50% 2127 90% 2127 99% 2127 ms Write: 10% 4997 30% 4997 50% 4997 90% 4997 99% 4997 ms Write: 10% 6286 30% 6286 50% 6286 90% 6286 99% 6286 ms Write: 10% 12205 30% 12205 50% 12205 90% 12205 99% 12205 ms Write: 10% 12635 30% 12635 50% 12635 90% 12635 99% 12635 ms Write: 10% 13778 30% 13778 50% 13778 90% 13778 99% 13778 ms Write: 10% 16947 30% 16947 50% 16947 90% 16947 99% 16947 ms Write: 10% 16782 30% 16782 50% 16782 90% 16782 99% 16782 ms Write: 10% 18075 30% 18075 50% 18075 90% 18075 99% 18075 ms Write: 10% 16447 30% 16447 50% 16447 90% 16447 99% 16447 ms Write: 10% 14838 30% 14838 50% 14838 90% 14838 99% 14838 ms Write: 10% 17614 30% 17614 50% 17614 90% 17614 99% 17614 ms Write: 10% 17214 30% 17214 50% 17214 90% 17214 99% 17214 ms Was written: 18.75 MiB, Speed: 0.3125 MiB/s Write: 10% 16769 30% 16769 50% 16769 90% 16769 99% 16769 ms Write: 10% 12978 30% 12978 50% 12978 90% 12978 99% 12978 ms Write: 10% 13536 30% 13536 50% 13536 90% 13536 99% 13536 ms Write: 10% 15410 30% 15410 50% 15410 90% 15410 99% 15410 ms Write: 10% 12357 30% 12357 50% 12357 90% 12357 99% 12357 ms Write: 10% 15047 30% 15047 50% 15047 90% 15047 99% 15047 ms Write: 10% 5381 30% 5381 50% 5381 90% 5381 99% 5381 ms Write: 10% 15459 30% 15459 50% 15459 90% 15459 99% 15459 ms Write: 10% 14898 30% 14898 50% 14898 90% 14898 99% 14898 ms Write: 10% 13833 30% 13833 50% 13833 90% 13833 99% 13833 ms Write: 10% 14726 30% 14726 50% 14726 90% 14726 99% 14726 ms Write: 10% 15443 30% 15443 50% 15443 90% 15443 99% 15443 ms Write: 10% 17054 30% 17054 50% 17054 90% 17054 99% 17054 ms Write: 10% 13199 30% 13199 50% 13199 90% 13199 99% 13199 ms Write: 10% 15102 30% 15102 50% 15102 90% 15102 99% 15102 ms Write: 10% 2682 30% 2682 50% 2682 90% 2682 99% 2682 ms Write: 10% 13602 30% 13602 50% 13602 90% 13602 99% 13602 ms Write: 10% 14428 30% 14428 50% 14428 90% 14428 99% 14428 ms Write: 10% 5546 30% 5546 50% 5546 90% 5546 99% 5546 ms Write: 10% 6148 30% 6148 50% 6148 90% 6148 99% 6148 ms Write: 10% 4886 30% 4886 50% 4886 90% 4886 99% 4886 ms Write: 10% 12966 30% 12966 50% 12966 90% 12966 99% 12966 ms Write: 10% 4943 30% 4943 50% 4943 90% 4943 99% 4943 ms Write: 10% 2631 30% 2631 50% 2631 90% 2631 99% 2631 ms Write: 10% 3868 30% 3868 50% 3868 90% 3868 99% 3868 ms Write: 10% 10653 30% 10653 50% 10653 90% 10653 99% 10653 ms Write: 10% 9499 30% 9499 50% 9499 90% 9499 99% 9499 ms Write: 10% 3162 30% 3162 50% 3162 90% 3162 99% 3162 ms Write: 10% 7401 30% 7401 50% 7401 90% 7401 99% 7401 ms Write: 10% 8939 30% 8939 50% 8939 90% 8939 99% 8939 ms Write: 10% 2828 30% 2828 50% 2828 90% 2828 99% 2828 ms Write: 10% 16100 30% 16100 50% 16100 90% 16100 99% 16100 ms Write: 10% 7584 30% 7584 50% 7584 90% 7584 99% 7584 ms Write: 10% 3010 30% 3010 50% 3010 90% 3010 99% 3010 ms Write: 10% 5126 30% 5126 50% 5126 90% 5126 99% 5126 ms Write: 10% 11347 30% 11347 50% 11347 90% 11347 99% 11347 ms Write: 10% 4513 30% 4513 50% 4513 90% 4513 99% 4513 ms Write: 10% 9883 30% 9883 50% 9883 90% 9883 99% 9883 ms Write: 10% 4627 30% 
4627 50% 4627 90% 4627 99% 4627 ms Write: 10% 4515 30% 4515 50% 4515 90% 4515 99% 4515 ms Write: 10% 3253 30% 3253 50% 3253 90% 3253 99% 3253 ms Write: 10% 7117 30% 7117 50% 7117 90% 7117 99% 7117 ms Write: 10% 3388 30% 3388 50% 3388 90% 3388 99% 3388 ms Write: 10% 6123 30% 6123 50% 6123 90% 6123 99% 6123 ms Write: 10% 6744 30% 6744 50% 6744 90% 6744 99% 6744 ms Write: 10% 5103 30% 5103 50% 5103 90% 5103 99% 5103 ms Write: 10% 14362 30% 14362 50% 14362 90% 14362 99% 14362 ms Write: 10% 5150 30% 5150 50% 5150 90% 5150 99% 5150 ms Write: 10% 3653 30% 3653 50% 3653 90% 3653 99% 3653 ms Update: 10% 658 30% 658 50% 658 90% 658 99% 658 ms Step 4. read modify write Write: 10% 4804 30% 4804 50% 4804 90% 4804 99% 4804 ms Write: 10% 14130 30% 14130 50% 14130 90% 14130 99% 14130 ms Write: 10% 15258 30% 15258 50% 15258 90% 15258 99% 15258 ms Write: 10% 17642 30% 17642 50% 17642 90% 17642 99% 17642 ms Write: 10% 17603 30% 17603 50% 17603 90% 17603 99% 17603 ms Write: 10% 15988 30% 15988 50% 15988 90% 15988 99% 15988 ms Write: 10% 17672 30% 17672 50% 17672 90% 17672 99% 17672 ms Write: 10% 14809 30% 14809 50% 14809 90% 14809 99% 14809 ms Write: 10% 15245 30% 15245 50% 15245 90% 15245 99% 15245 ms Write: 10% 13617 30% 13617 50% 13617 90% 13617 99% 13617 ms Write: 10% 17329 30% 17329 50% 17329 90% 17329 99% 17329 ms Write: 10% 12353 30% 12353 50% 12353 90% 12353 99% 12353 ms Write: 10% 12431 30% 12431 50% 12431 90% 12431 99% 12431 ms Write: 10% 18018 30% 18018 50% 18018 90% 18018 99% 18018 ms Write: 10% 6877 30% 6877 50% 6877 90% 6877 99% 6877 ms Write: 10% 12387 30% 12387 50% 12387 90% 12387 99% 12387 ms Write: 10% 17701 30% 17701 50% 17701 90% 17701 99% 17701 ms Write: 10% 12119 30% 12119 50% 12119 90% 12119 99% 12119 ms Write: 10% 11673 30% 11673 50% 11673 90% 11673 99% 11673 ms Write: 10% 16132 30% 16132 50% 16132 90% 16132 99% 16132 ms Write: 10% 11943 30% 11943 50% 11943 90% 11943 99% 11943 ms Write: 10% 10876 30% 10876 50% 10876 90% 10876 99% 10876 ms Write: 10% 19115 30% 19115 50% 19115 90% 19115 99% 19115 ms Write: 10% 16762 30% 16762 50% 16762 90% 16762 99% 16762 ms Write: 10% 14311 30% 14311 50% 14311 90% 14311 99% 14311 ms Write: 10% 7701 30% 7701 50% 7701 90% 7701 99% 7701 ms Write: 10% 13927 30% 13927 50% 13927 90% 13927 99% 13927 ms Write: 10% 13337 30% 13337 50% 13337 90% 13337 99% 13337 ms Write: 10% 6266 30% 6266 50% 6266 90% 6266 99% 6266 ms Write: 10% 13124 30% 13124 50% 13124 90% 13124 99% 13124 ms Write: 10% 13086 30% 13086 50% 13086 90% 13086 99% 13086 ms Write: 10% 6463 30% 6463 50% 6463 90% 6463 99% 6463 ms Write: 10% 6191 30% 6191 50% 6191 90% 6191 99% 6191 ms Write: 10% 18419 30% 18419 50% 18419 90% 18419 99% 18419 ms Write: 10% 12211 30% 12211 50% 12211 90% 12211 99% 12211 ms Write: 10% 4634 30% 4634 50% 4634 90% 4634 99% 4634 ms Write: 10% 4307 30% 4307 50% 4307 90% 4307 99% 4307 ms Write: 10% 3862 30% 3862 50% 3862 90% 3862 99% 3862 ms Write: 10% 10550 30% 10550 50% 10550 90% 10550 99% 10550 ms Write: 10% 13890 30% 13890 50% 13890 90% 13890 99% 13890 ms Write: 10% 9512 30% 9512 50% 9512 90% 9512 99% 9512 ms Write: 10% 3560 30% 3560 50% 3560 90% 3560 99% 3560 ms Write: 10% 6751 30% 6751 50% 6751 90% 6751 99% 6751 ms Write: 10% 8336 30% 8336 50% 8336 90% 8336 99% 8336 ms Write: 10% 6593 30% 6593 50% 6593 90% 6593 99% 6593 ms Write: 10% 12912 30% 12912 50% 12912 90% 12912 99% 12912 ms Write: 10% 6333 30% 6333 50% 6333 90% 6333 99% 6333 ms Write: 10% 7792 30% 7792 50% 7792 90% 7792 99% 7792 ms Write: 10% 4227 30% 4227 50% 4227 90% 4227 99% 4227 ms Write: 10% 4683 30% 4683 50% 
4683 90% 4683 99% 4683 ms Write: 10% 5604 30% 5604 50% 5604 90% 5604 99% 5604 ms Write: 10% 5878 30% 5878 50% 5878 90% 5878 99% 5878 ms Write: 10% 3536 30% 3536 50% 3536 90% 3536 99% 3536 ms Write: 10% 3932 30% 3932 50% 3932 90% 3932 99% 3932 ms Write: 10% 4043 30% 4043 50% 4043 90% 4043 99% 4043 ms Write: 10% 4819 30% 4819 50% 4819 90% 4819 99% 4819 ms Write: 10% 5712 30% 5712 50% 5712 90% 5712 99% 5712 ms Write: 10% 14212 30% 14212 50% 14212 90% 14212 99% 14212 ms Write: 10% 3692 30% 3692 50% 3692 90% 3692 99% 3692 ms Write: 10% 3592 30% 3592 50% 3592 90% 3592 99% 3592 ms Write: 10% 6937 30% 6937 50% 6937 90% 6937 99% 6937 ms Write: 10% 5173 30% 5173 50% 5173 90% 5173 99% 5173 ms Write: 10% 9117 30% 9117 50% 9117 90% 9117 99% 9117 ms Write: 10% 4189 30% 4189 50% 4189 90% 4189 99% 4189 ms Read: 10% 7994 30% 11388 50% 14783 90% 21572 99% 23099 ms Update: 10% 1121 30% 1121 50% 1121 90% 1121 99% 1121 ms |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> YdbSdkSessionsPool1Session::GetSession/0 >> test_ttl.py::TestTTLDefaultEnv::test_case >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> YdbSdkSessionsPool1Session::CustomPlan/0 >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> YdbSdkSessionsPool::PeriodicTask/0 |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> listing_batching.py::TestListingBatching::test_listing_batching_solomon [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_ttl.py::TestTTLAlterSettings::test_case |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] 
[GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestSelect::test_as_table >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] |98.6%| [TA] $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.6%| [TA] {RESULT} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] >> test_select.py::TestSelect::test_as_table [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] |98.6%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> overlapping_portions.py::TestOverlappingPortions::test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_as_table [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> 
settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.7%| [TA] $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |98.7%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_canonical_requests.py::Test::test_essential |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> Login::TokenIsNotExpired >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |98.7%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> Login::TokenIsNotExpired [GOOD] >> BulkUpsert::BulkUpsert |98.7%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> DataShardDiskQuotas::DiskQuotaExceeded |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> GenericProviderLookupActor::Lookup >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupMultiMatches >> GenericProviderLookupActor::LookupMultiMatches [GOOD] >> GenericProviderLookupActor::LookupWithErrors [GOOD] |98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2026-01-07 18:06:49.401 INFO ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007F81162B6FC0) [generic] yql_generic_lookup_actor.cpp:153: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2026-01-07 18:06:49.453 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007F81162B6FC0) [generic] yql_generic_lookup_actor.cpp:301: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ 
left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } ... nal_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison 
{ operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2026-01-07 18:06:49.663 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:332: ActorId=[3:7592682738222886151:2051] Got TListSplitsStreamIterator 2026-01-07 18:06:49.663 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:200: ActorId=[3:7592682738222886151:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2026-01-07 18:06:49.663 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:233: ActorId=[3:7592682738222886151:2051] Got ReadSplitsStreamIterator from Connector 2026-01-07 18:06:49.663 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:354: ActorId=[3:7592682738222886151:2051] Got DataChunk 2026-01-07 18:06:49.664 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:365: ActorId=[3:7592682738222886151:2051] Got EOF 2026-01-07 18:06:49.664 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=736200, tid=0x00007B81113C8640) [generic] yql_generic_lookup_actor.cpp:419: Sending lookup results for 3 keys |98.7%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest |98.7%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/generic/actors/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/auth/gtest >> Login::TokenIsNotExpired [GOOD] |98.7%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/auth/gtest |98.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/auth/gtest >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/oom/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> QueryActorTest::SimpleQuery |98.7%| [TA] {RESULT} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] >> DataShardDiskQuotas::DiskQuotaExceeded [GOOD] >> DataShardDiskQuotas::ShardRestartOnCreateTable >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> KqpTpch::Query01 >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> TGRpcRateLimiterTest::CreateResource >> DataShardDiskQuotas::ShardRestartOnCreateTable [GOOD] >> DataShardDiskQuotas::ShardRestartOnSplitDst >> BasicExample::BasicExample >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> KqpTpch::Query01 [GOOD] >> 
KqpTpch::Query02 >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> BasicExample::BasicExample [GOOD] >> test_cte.py::TestCte::test_toplevel >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.7%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |98.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/basic_example/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_disk_quotas/unittest >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:06:53.499842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:06:53.840230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:06:53.894432Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:06:53.895079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:06:53.895146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:06:54.614041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:06:54.614169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:06:54.708697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809209381158 != 1767809209381162 2026-01-07T18:06:54.726486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:06:54.776824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:06:54.944096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... Setting hard disk quota to 1 byte 2026-01-07T18:06:55.408518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:06:55.408603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:06:55.408642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-07T18:06:55.408726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:830:2725], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T18:06:55.408758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-07T18:06:55.577467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } DatabaseQuotas { data_size_hard_quota: 1 } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T18:06:55.577712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:06:55.581455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T18:06:55.581580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 
281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T18:06:55.586023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T18:06:55.586251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:06:55.586421Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T18:06:55.587136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T18:06:55.587324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T18:06:55.587385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T18:06:55.587430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-07T18:06:55.587642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.587694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.587780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:06:55.587843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T18:06:55.587893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-07T18:06:55.587948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-07T18:06:55.588064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T18:06:55.588602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T18:06:55.588649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T18:06:55.588686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-07T18:06:55.588887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T18:06:55.589158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-07T18:06:55.589340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.589373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.589446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:06:55.589494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T18:06:55.589542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-07T18:06:55.589636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T18:06:55.590075Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-07T18:06:55.590115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-07T18:06:55.590208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.590236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-07T18:06:55.590283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:06:55.590318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:06:55.590367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:06:55.590415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-07T18:06:55.590469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-07T18:06:55.594706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T18:06:55.595329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2026-01-07T18:06:55.595396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-07T18:06:55.595545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T18:06:55.595827Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 281474976715657 2026-01-07T18:06:55.610776Z node 1 :FLAT_TX_SC ... .h:3153: StateWork, received event# 268829696, Sender [3:915:2777], Recipient [3:923:2783]: NKikimr::TEvTablet::TEvTabletDead 2026-01-07T18:07:26.357785Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T18:07:26.357959Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T18:07:26.360260Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [3:399:2398], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-07T18:07:26.360310Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-07T18:07:26.360385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-07T18:07:26.360439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-07T18:07:26.360550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2026-01-07T18:07:26.360971Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:921:2781], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [3:921:2781] ServerId: [3:926:2784] } 2026-01-07T18:07:26.361025Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T18:07:26.361068Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2026-01-07T18:07:26.362382Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [3:1243:3044], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-07T18:07:26.362423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-07T18:07:26.362449Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-07T18:07:26.362670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-07T18:07:26.362749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 
72057594046644480:1 tabletId 72075186224037888 2026-01-07T18:07:26.363169Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:1535:3269], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:1535:3269] ServerId: [3:1536:3270] } 2026-01-07T18:07:26.363205Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-07T18:07:26.363232Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-01-07T18:07:26.363367Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-07T18:07:26.363461Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-07T18:07:26.689430Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T18:07:26.689528Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-07T18:07:26.689630Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:399:2398], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-07T18:07:26.689664Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
Inserting the 4th row 2026-01-07T18:07:26.998104Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [3:1567:3284], Recipient [3:1390:3156]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-07T18:07:26.998192Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037890 2026-01-07T18:07:26.998373Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [3:1390:3156], Recipient [3:1390:3156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T18:07:26.998413Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T18:07:26.998504Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037890 2026-01-07T18:07:26.998669Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037890, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-07T18:07:26.998788Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table2, shard: 72075186224037890, write point (Uint32 : 4) 2026-01-07T18:07:26.998855Z node 3 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:39:1] 2026-01-07T18:07:26.998970Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CheckWrite 2026-01-07T18:07:26.999040Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-07T18:07:26.999088Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CheckWrite 2026-01-07T18:07:26.999129Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-07T18:07:26.999170Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-07T18:07:26.999209Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T18:07:26.999286Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037890 2026-01-07T18:07:26.999356Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-07T18:07:26.999395Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-07T18:07:26.999418Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BlockFailPoint 2026-01-07T18:07:26.999444Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BlockFailPoint 2026-01-07T18:07:26.999470Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-07T18:07:26.999490Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BlockFailPoint 2026-01-07T18:07:26.999512Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit ExecuteWrite 2026-01-07T18:07:26.999535Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit ExecuteWrite 2026-01-07T18:07:26.999572Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037890 2026-01-07T18:07:26.999629Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-07T18:07:26.999796Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037890, row count=1 2026-01-07T18:07:26.999860Z node 3 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T18:07:26.999938Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is ExecutedNoMoreRestarts 2026-01-07T18:07:26.999998Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit ExecuteWrite 2026-01-07T18:07:27.000047Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit FinishProposeWrite 2026-01-07T18:07:27.000087Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-07T18:07:27.000198Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is DelayCompleteNoMoreRestarts 2026-01-07T18:07:27.000247Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit FinishProposeWrite 2026-01-07T18:07:27.000294Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T18:07:27.000331Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CompletedOperations 2026-01-07T18:07:27.000380Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-07T18:07:27.000406Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T18:07:27.000435Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037890 has finished 2026-01-07T18:07:27.017687Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037890 2026-01-07T18:07:27.017782Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-07T18:07:27.017881Z node 3 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction 
complete txid 2 at tablet 72075186224037890 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T18:07:27.017998Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_disk_quotas/unittest |98.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_disk_quotas/unittest >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> ServerRestartTest::RestartOnGetSession >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_tests/py3test |98.8%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ctas.py::TestYtCtas::test_simple_ctast >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> QueryActorTest::StreamQuery [GOOD] |98.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.8%| [TS] {RESULT} ydb/tests/library/ut/py3test |98.8%| [TS] {BAZEL_UPLOAD} ydb/tests/library/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2026-01-07T18:07:01.332228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592682792343299372:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:01.347936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:01.986235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T18:07:02.313691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-07T18:07:02.345239Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:02.397724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:02.397820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:02.432410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:02.620215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:07:02.672285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:02.746472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T18:07:03.133076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:03.145187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:07:03.239916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:07:03.504008Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7592682800933234717:2538], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-01-07T18:07:06.328889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592682792343299372:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:06.329004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:07:06.507136Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:06.518032Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976264483.033615s seconds to be completed 2026-01-07T18:07:06.536637Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, workerId: [1:7592682813818136665:2309], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T18:07:06.536842Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T18:07:06.537074Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T18:07:06.537099Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T18:07:06.537134Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:06.549571Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7592682800933234717:2538], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, TxId: , text: SELECT 42 2026-01-07T18:07:06.568758Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7592682813818136665:2309] 2026-01-07T18:07:06.568833Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7592682813818136667:2553] 2026-01-07T18:07:08.477172Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7592682813818136666:2310], selfId: [1:7592682792343299518:2266], source: [1:7592682813818136665:2309] 2026-01-07T18:07:08.478368Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7592682800933234717:2538], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, TxId: 2026-01-07T18:07:08.485683Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7592682800933234717:2538], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, TxId: 2026-01-07T18:07:08.496751Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=NGNhYzA5MjAtYTE2YTE2MjktMTg4ZTgxNjgtMjY5NzFkNGM=, workerId: [1:7592682813818136665:2309], local sessions count: 0 2026-01-07T18:07:09.302042Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592682826742607150:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:09.313817Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:09.397068Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-07T18:07:09.505893Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:09.531015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:09.531097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:09.543539Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:09.671488Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T18:07:09.757653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:09.768594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:07:09.772014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:07:09.866147Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7592682826742607964:2535], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-07T18:07:10.295296Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:12.928216Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:12.932567Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976264476.619069s seconds to be completed 2026-01-07T18:07:12.934430Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=2&id=Y2ZiMTAyNDAtNjI3MDIxZWUtYmQzZTI3ZjUtOGY3ZDdiZGI=, workerId: [2:7592682839627509884:2305], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T18:07:12.934543Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T18:07:12.935083Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T18:07:12.935095Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-07T18:07:12.935107Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:12.936415Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7592682826742607964:2535], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Y2ZiMTAyNDAtNjI3MDIxZWUtYmQzZTI3ZjUtOGY3ZDdiZGI=, TxId: , text: DECLARE $k As Uint64; DECLARE $v As String; UPSERT INTO TestTable (Key, Value) VALUES ($k, $v) 2026-01-07T18:07:12.939822Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=2&id=Y2ZiMTAyNDAtNjI3MDIxZWUtYmQzZTI3ZjUtOGY3ZDdiZGI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7592682839627509884:2305] 2026-01-07T18:07:12.939864Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7592682839627509912:2548] 2026-01-07T18:07:13.358738Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:7592682839627509891:2309], selfId: [2:7592682826742607350:2264], source: [2:7592682839627509884:2305] 2026-01-07T18:07:13.359173Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7592682826742607964:2535], DataQuery #1 finished S ... 
25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:20.370431Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:20.434281Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:20.629031Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-07T18:07:20.794504Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:20.804276Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:07:20.861528Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-07T18:07:21.113112Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:24.803053Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:24.806501Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976264464.745139s seconds to be completed 2026-01-07T18:07:24.808799Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=YmYzZDJmNzAtY2MyZDgyNGMtMTMyMzg1NmUtOTcyNGQ2ZWE=, workerId: [4:7592682888569702831:2309], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-07T18:07:24.810064Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T18:07:24.810165Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-07T18:07:24.810188Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-07T18:07:24.810218Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-07T18:07:24.810975Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-07T18:07:24.813472Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], Start read next stream part 2026-01-07T18:07:24.821220Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kect8qmd7ktj2026htcdhmq2", Created new session, sessionId: ydb://session/3?node_id=4&id=MzQ3Yzk5MGItMzFmYjQxOGYtNmViZjBkMmQtYzE5OGY1NDg=, workerId: [4:7592682888569702845:2310], database: /dc-1, longSession: 0, local sessions count: 2 2026-01-07T18:07:24.821658Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kect8qmd7ktj2026htcdhmq2, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=MzQ3Yzk5MGItMzFmYjQxOGYtNmViZjBkMmQtYzE5OGY1NDg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7592682888569702845:2310] 2026-01-07T18:07:24.821696Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7592682888569702846:2554] 2026-01-07T18:07:24.823217Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592682888569702855:2314], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:07:24.823275Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592682888569702847:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:07:24.823402Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:07:24.826539Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7592682888569702862:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:07:24.826604Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:07:24.852108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:07:24.871356Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7592682888569702861:2315], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T18:07:24.950956Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7592682888569702914:2590] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:07:33.619478Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T18:07:33.623712Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:333: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], Cancel stream request 2026-01-07T18:07:33.623818Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682871389833597:2538], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YmYzZDJmNzAtY2MyZDgyNGMtMTMyMzg1NmUtOTcyNGQ2ZWE=, TxId: 2026-01-07T18:07:33.624466Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=YmYzZDJmNzAtY2MyZDgyNGMtMTMyMzg1NmUtOTcyNGQ2ZWE=, workerId: [4:7592682888569702831:2309], local sessions count: 1 2026-01-07T18:07:33.629042Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-07T18:07:33.629386Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976264455.922245s seconds to be completed 2026-01-07T18:07:33.631775Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=NjUwZmFhNGYtNzY3NDZlNS0xMzI5NGRkMy1lOWJhNjRlMQ==, workerId: [4:7592682927224408661:2342], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-07T18:07:33.631973Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-07T18:07:33.632845Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-07T18:07:33.632993Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], Start read next stream part 2026-01-07T18:07:33.638320Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kect90805ehgqhqcgvnkvar5", Created new session, sessionId: ydb://session/3?node_id=4&id=ZDk4NGVhN2MtMjNhNmQ4OTItZGUxOGRjMS0zMmViMDZjMw==, workerId: [4:7592682927224408664:2343], database: /dc-1, longSession: 0, local sessions count: 3 2026-01-07T18:07:33.638600Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kect90805ehgqhqcgvnkvar5, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZDk4NGVhN2MtMjNhNmQ4OTItZGUxOGRjMS0zMmViMDZjMw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7592682927224408664:2343] 2026-01-07T18:07:33.638630Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7592682927224408665:2636] 2026-01-07T18:07:33.791944Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767809253785, txId: 281474976710663] shutting down 2026-01-07T18:07:33.799249Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kect90805ehgqhqcgvnkvar5", Forwarded response to sender actor, requestId: 5, sender: [4:7592682927224408662:2634], selfId: [4:7592682871389832788:2090], source: [4:7592682927224408664:2343] 2026-01-07T18:07:33.799790Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=ZDk4NGVhN2MtMjNhNmQ4OTItZGUxOGRjMS0zMmViMDZjMw==, workerId: [4:7592682927224408664:2343], local sessions count: 2 2026-01-07T18:07:33.802084Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-07T18:07:33.827466Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], Start read next stream part 2026-01-07T18:07:33.827577Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-07T18:07:33.827681Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7592682927224408659:2634], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NjUwZmFhNGYtNzY3NDZlNS0xMzI5NGRkMy1lOWJhNjRlMQ==, TxId: 2026-01-07T18:07:33.828616Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NjUwZmFhNGYtNzY3NDZlNS0xMzI5NGRkMy1lOWJhNjRlMQ==, workerId: [4:7592682927224408661:2342], local sessions count: 1 |98.8%| [TM] {RESULT} ydb/library/query_actor/ut/unittest >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources |98.8%| [TM] {BAZEL_UPLOAD} ydb/library/query_actor/ut/unittest >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> ServerRestartTest::RestartOnGetSession [GOOD] >> test_cte.py::TestCte::test_toplevel [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/plan2svg/py3test >> test_cte.py::TestCte::test_toplevel [GOOD] |98.8%| [TM] {RESULT} ydb/tests/functional/kqp/plan2svg/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/plan2svg/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/add_column/py3test >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/add_column/py3test |98.8%| [TM] {RESULT} ydb/tests/datashard/add_column/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> 
test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> DataShardCompaction::CompactBorrowed |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.8%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> SdkCredProvider::PingFromProviderSyncDiscovery >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::ReconnectsWithKesusAfterSeveralRetries [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession >> RangeOps::Intersection >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> RangeOps::Intersection [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, 
Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) 
result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_range_ops/unittest >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> test_query_cache.py::TestQueryCache::test >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> TMemoryController::Counters |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] |98.8%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.8%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... 
results_accumulator.log} |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.8%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_query_cache.py::TestQueryCache::test [GOOD] >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> MediatorTest::BasicTimecastUpdates >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> TFetchRequestTests::HappyWay >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> test_canonical_requests.py::Test::test_essential [FAIL] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> TSentinelUnstableTests::BSControllerCantChangeStatus >> CoordinatorTests::Route >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> CoordinatorTests::WaitNodesConnected >> LongTxService::BasicTransactions >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> MediatorTest::BasicTimecastUpdates [GOOD] >> CoordinatorTests::WaitNodesConnected [GOOD] >> CoordinatorTests::ProcessMappingWithNodeIds >> CoordinatorTests::ProcessMappingWithNodeIds [GOOD] >> CoordinatorTests::RebalanceAfterNewNodeConnected >> CoordinatorTests::RebalanceAfterNewNodeConnected [GOOD] >> CoordinatorTests::RebalanceAfterNodeDisconnected >> CoordinatorTests::RebalanceAfterNodeDisconnected [GOOD] >> MediatorTest::MultipleTablets >> LeaderElectionTests::Test1 >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode [GOOD] >> 
LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> TopicSessionTests::TwoSessionsWithoutOffsets >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> MediatorTimeCast::ReadStepSubscribe >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> MediatorTest::MultipleTablets [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> MediatorTest::TabletAckBeforePlanComplete |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/query_cache/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.8%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> LongTxService::LockSubscribe [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2026-01-07T18:08:17.765305Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005851s 2026-01-07T18:08:18.651526Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T18:08:18.652048Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpnyWmNn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T18:08:18.652595Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpnyWmNn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpnyWmNn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9729163428188811998 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T18:08:18.831818Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 1] Received TEvBeginTx from [1:441:2331] 2026-01-07T18:08:18.832875Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:123: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.876342Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:442:2101] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.876467Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:91:2048] 2026-01-07T18:08:18.876663Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:155:2090] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.876938Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:442:2101] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.877106Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 1] Received TEvCommitTx from [2:155:2090] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.877161Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:162: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 without side-effects 2026-01-07T18:08:18.877483Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:442:2101] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.877688Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] 
Received TEvRollbackTx from [2:155:2090] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.885727Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:442:2101] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.885937Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:155:2090] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=1 2026-01-07T18:08:18.893577Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-07T18:08:18.893720Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-07T18:08:18.894062Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-07T18:08:18.894385Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:91:2048] 2026-01-07T18:08:18.894694Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:153:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-07T18:08:18.894917Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:64:2076] ServerId# [1:363:2280] TabletId# 72057594037932033 PipeClientId# [2:64:2076] 2026-01-07T18:08:18.901852Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:442:2101] LongTxId# ydb://long-tx/000000001jccshgjb117edncpb?node_id=3 2026-01-07T18:08:18.902093Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:480:2103] 2026-01-07T18:08:19.833469Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T18:08:19.833539Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:20.342188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:21.215258Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T18:08:21.215768Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpctsNII/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T18:08:21.215974Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpctsNII/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmpctsNII/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8166069082043843465 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T18:08:21.800036Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:346: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:625:2497] for database /dc-1 2026-01-07T18:08:21.800118Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.812194Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.812397Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:821:2678] Sending navigate request for /dc-1 2026-01-07T18:08:21.822740Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:821:2678] Received navigate response status Ok 2026-01-07T18:08:21.822818Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:821:2678] Sending acquire step to coordinator 72057594046316545 2026-01-07T18:08:21.823367Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:821:2678] Received read step 1500 2026-01-07T18:08:21.823486Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2026-01-07T18:08:21.823698Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:625:2497] 2026-01-07T18:08:21.823745Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.834604Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.834802Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: 
LongTxService.AcquireSnapshot [3:834:2685] Sending navigate request for /dc-1 2026-01-07T18:08:21.835060Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:834:2685] Received navigate response status Ok 2026-01-07T18:08:21.835112Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:834:2685] Sending acquire step to coordinator 72057594046316545 2026-01-07T18:08:21.835327Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:834:2685] Received read step 1500 2026-01-07T18:08:21.835404Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2026-01-07T18:08:21.835453Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:425: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2026-01-07T18:08:21.835647Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:625:2497] 2026-01-07T18:08:21.835689Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.846044Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-07T18:08:21.846266Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:836:2687] Sending navigate request for /dc-1 2026-01-07T18:08:21.846527Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:836:2687] Received navigate response status Ok 2026-01-07T18:08:21.846575Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:836:2687] Sending acquire step to coordinator 72057594046316545 2026-01-07T18:08:21.846753Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:836:2687] Received read step 1500 2026-01-07T18:08:21.846841Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2026-01-07T18:08:21.846898Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:423: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ed7nz524j4ysq869y4?node_id=3&snapshot=1500%3Amax 2026-01-07T18:08:22.903341Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-07T18:08:22.903815Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmp4uex6A/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-07T18:08:22.904015Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmp4uex6A/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ec9d/0021e0/r3tmp/tmp4uex6A/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2871321067556019725 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-07T18:08:22.964572Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:468: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2026-01-07T18:08:22.964683Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:438:2328] for LockId# 987 LockNode# 5 2026-01-07T18:08:22.980323Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:439:2101] for LockId# 987 LockNode# 5 2026-01-07T18:08:22.980453Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:102:2048] 2026-01-07T18:08:22.982520Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 987 LockNode# 5 2026-01-07T18:08:22.983947Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-07T18:08:22.984178Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:438:2328] for LockId# 123 LockNode# 5 2026-01-07T18:08:22.984334Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:439:2101] for LockId# 123 LockNode# 5 2026-01-07T18:08:22.986533Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 123 LockNode# 5 2026-01-07T18:08:22.986773Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2026-01-07T18:08:22.986939Z node 5 :LONG_TX_SERVICE DEBUG: 
long_tx_service_impl.cpp:479: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2026-01-07T18:08:22.987101Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-07T18:08:22.987277Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:439:2101] for LockId# 234 LockNode# 5 2026-01-07T18:08:22.987526Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 6 2026-01-07T18:08:22.987654Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T18:08:22.988284Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-07T18:08:22.988625Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:102:2048] 2026-01-07T18:08:22.988752Z node 6 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [6:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-07T18:08:22.988998Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:75:2076] ServerId# [5:360:2277] TabletId# 72057594037932033 PipeClientId# [6:75:2076] 2026-01-07T18:08:23.226538Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:470:2048] 2026-01-07T18:08:23.226808Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-07T18:08:23.226860Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T18:08:23.227357Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:470:2048] 2026-01-07T18:08:23.227545Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:471:2102] ServerId# [5:475:2348] TabletId# 72057594037932033 PipeClientId# [6:471:2102] 2026-01-07T18:08:23.485626Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:489:2048] 2026-01-07T18:08:23.486014Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T18:08:23.486088Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-07T18:08:23.486885Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:490:2103] ServerId# [5:494:2358] TabletId# 72057594037932033 PipeClientId# [6:490:2103] 2026-01-07T18:08:23.487180Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:489:2048] 2026-01-07T18:08:23.785309Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:511:2048] 2026-01-07T18:08:23.785595Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-07T18:08:23.785654Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] 
NodeDisconnected NodeId# 6 2026-01-07T18:08:23.785883Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:511:2048] 2026-01-07T18:08:23.786261Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:512:2105] ServerId# [5:516:2371] TabletId# 72057594037932033 PipeClientId# [6:512:2105] |98.8%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 |98.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/long_tx_service/ut/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> MediatorTest::TabletAckWhenDead >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> test.py::test_local [GOOD] >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::CDC >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:07:52.253044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:07:52.574070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:07:52.653033Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:07:52.653694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:07:52.653764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:07:53.567089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:53.567230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:53.727458Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809268749104 != 1767809268749108 2026-01-07T18:07:53.763658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:53.834011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:53.992808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:07:54.364518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:54.379175Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:54.524334Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T18:07:54.524431Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T18:07:54.525995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T18:07:54.749915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T18:07:54.750046Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T18:07:54.755399Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T18:07:54.755602Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-07T18:07:54.756088Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T18:07:54.756284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T18:07:54.756513Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T18:07:54.756848Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T18:07:54.765230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:07:54.767536Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T18:07:54.767629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T18:07:54.869582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:07:54.870820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:07:54.872837Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T18:07:54.873343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:07:54.944705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:07:54.945593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:07:54.945740Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:07:54.951466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:07:54.951597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:07:54.951699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:07:54.955499Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:07:54.955738Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:07:54.955863Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T18:07:54.967195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:07:55.027690Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:07:55.029037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:07:55.029236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T18:07:55.029293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:07:55.029334Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:07:55.029377Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:07:55.029624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:07:55.029674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:07:55.031349Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:07:55.031484Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:07:55.031603Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:07:55.031648Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:07:55.031800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T18:07:55.031859Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T18:07:55.031894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T18:07:55.031930Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:07:55.031977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:07:55.032148Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:07:55.032193Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:07:55.032238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T18:07:55.032665Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 
2026-01-07T18:07:55.032745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T18:07:55.032870Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:07:55.033324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T18:07:55.033403Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T18:07:55.034086Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... 1474976715661] at 72075186224037892 is DelayComplete 2026-01-07T18:08:30.298527Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2026-01-07T18:08:30.298559Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2026-01-07T18:08:30.298588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037892 on unit CompletedOperations 2026-01-07T18:08:30.298624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037892 is Executed 2026-01-07T18:08:30.298648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2026-01-07T18:08:30.298676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037892 has finished 2026-01-07T18:08:30.298707Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:08:30.298738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037892 2026-01-07T18:08:30.298769Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037892 has no attached operations 2026-01-07T18:08:30.298798Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037892 2026-01-07T18:08:30.309755Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T18:08:30.309839Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-07T18:08:30.309883Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037892 on unit CompleteOperation 2026-01-07T18:08:30.309968Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1649:3442], exec latency: 0 ms, propose latency: 1 ms 2026-01-07T18:08:30.310018Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-07T18:08:30.310196Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1649:3442] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 
marker# P12 2026-01-07T18:08:30.310247Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2026-01-07T18:08:30.310410Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1649:3442], Recipient [2:983:2833]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-07T18:08:30.310454Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-07T18:08:30.310543Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:983:2833], Recipient [2:983:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:30.310577Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:30.310668Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T18:08:30.310709Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-07T18:08:30.310753Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2026-01-07T18:08:30.310789Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2026-01-07T18:08:30.310828Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [72500:281474976715661] at 72075186224037890 2026-01-07T18:08:30.310869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-07T18:08:30.310907Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-07T18:08:30.310938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2026-01-07T18:08:30.310972Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-07T18:08:30.311224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Continue 2026-01-07T18:08:30.311261Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T18:08:30.311292Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-07T18:08:30.311327Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-07T18:08:30.311364Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T18:08:30.311879Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1681:3471], Recipient [2:983:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-07T18:08:30.311930Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing 
event TEvPrivate::TEvRegisterScanActor 2026-01-07T18:08:30.312156Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-07T18:08:30.312287Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-07T18:08:30.314242Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-07T18:08:30.314293Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037890 2026-01-07T18:08:30.314527Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:983:2833], Recipient [2:983:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:30.314566Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:30.314631Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-07T18:08:30.314670Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-07T18:08:30.314709Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for ReadTableScan 2026-01-07T18:08:30.314740Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-07T18:08:30.314776Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [72500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2026-01-07T18:08:30.314821Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-07T18:08:30.314856Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2026-01-07T18:08:30.314887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2026-01-07T18:08:30.314916Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-07T18:08:30.315138Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is DelayComplete 2026-01-07T18:08:30.315176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2026-01-07T18:08:30.315206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T18:08:30.315237Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompletedOperations 2026-01-07T18:08:30.315268Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-07T18:08:30.315292Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 
72075186224037890 executing on unit CompletedOperations 2026-01-07T18:08:30.315321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037890 has finished 2026-01-07T18:08:30.315353Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:08:30.315380Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-07T18:08:30.315411Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-07T18:08:30.315441Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-07T18:08:30.328757Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T18:08:30.328836Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-07T18:08:30.328881Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-07T18:08:30.328971Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1649:3442], exec latency: 1 ms, propose latency: 1 ms 2026-01-07T18:08:30.329033Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-07T18:08:30.329248Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1649:3442] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2026-01-07T18:08:30.329342Z node 2 :TX_PROXY INFO: datareq.cpp:834: Actor# [2:1649:3442] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_compaction/unittest |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serializable/py3test |98.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> TopicSessionTests::TwoSessionWithoutPredicate >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> MediatorTest::TabletAckWhenDead [GOOD] >> MediatorTimeCast::GranularTimecast [GOOD] >> MediatorTest::PlanStepAckToReconnectedMediator >> SequenceShardTests::Basics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-07T18:08:26.261765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:26.431333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:08:26.477552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:26.478617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:26.478680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:27.074837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:27.074964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:27.184227Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809302827176 != 1767809302827180 2026-01-07T18:08:27.221366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:27.282218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:27.427401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:08:27.810664Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:922: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2026-01-07T18:08:27.811341Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-07T18:08:27.815620Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 1000 2026-01-07T18:08:27.826773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:27.948521Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1500 2026-01-07T18:08:28.039417Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2026-01-07T18:08:28.178451Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2026-01-07T18:08:28.315267Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2026-01-07T18:08:28.486820Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2026-01-07T18:08:28.664095Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 
2026-01-07T18:08:28.674940Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 8000 } 2026-01-07T18:08:28.817228Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2026-01-07T18:08:28.962666Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 8000 2026-01-07T18:08:28.977115Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [1:26:2073] HANDLE EvClientDestroyed 2026-01-07T18:08:29.025295Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-07T18:08:29.026176Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 8000 2026-01-07T18:08:29.039766Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 13000 } 2026-01-07T18:08:29.150290Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8500 2026-01-07T18:08:29.258406Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2026-01-07T18:08:29.423449Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2026-01-07T18:08:29.597598Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2026-01-07T18:08:29.761745Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2026-01-07T18:08:29.900909Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 13000 ... waiting for SysViewsRoster update finished 2026-01-07T18:08:34.747445Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:34.752794Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:08:34.756401Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:34.756719Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:34.756889Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:35.007553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:35.008686Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:35.035441Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767809310802846 != 1767809310802850 2026-01-07T18:08:35.039768Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:35.087324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:35.162126Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:08:35.598740Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-07T18:08:35.599547Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2026-01-07T18:08:35.599617Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:378: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-07T18:08:35.599669Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:886:2754] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2026-01-07T18:08:35.600041Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-07T18:08:35.600214Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 1000 2026-01-07T18:08:35.600348Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2026-01-07T18:08:35.600577Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-07T18:08:35.600658Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify 
Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2026-01-07T18:08:35.600711Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:889:2756] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 1000} 2026-01-07T18:08:35.607444Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 1000 2026-01-07T18:08:35.607730Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-07T18:08:35.607817Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2026-01-07T18:08:35.607866Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:890:2757] {TEvRegisterTabletResult TabletI ... GranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.450256Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.463782Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.475448Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.489381Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.511064Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.536539Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-07T18:08:36.565345Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2026-01-07T18:08:36.577525Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [2:26:2073] HANDLE EvClientDestroyed 2026-01-07T18:08:36.577768Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 6 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-07T18:08:36.577823Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-07T18:08:36.578929Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-07T18:08:36.579121Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-07T18:08:36.579175Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-07T18:08:36.598223Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2026-01-07T18:08:36.598704Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-07T18:08:36.621759Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-07T18:08:36.632916Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... tablet1 at 4000 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2026-01-07T18:08:36.644114Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4500} ... tablet1 at 4500 ... tablet2 at 4500 ... 
tablet3 at 4500 |98.8%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/time_cast/ut/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> TMLPConsumerTests::ReloadPQTablet >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> DescribeSchemaSecretsService::GetNewValue >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2026-01-07T18:08:18.244967Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-07T18:08:18.245064Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-07T18:08:18.245160Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T18:08:18.245195Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-07T18:08:18.245273Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-07T18:08:18.245368Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-07T18:08:18.249509Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { 
Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-07T18:08:18.254863Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 
FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
kId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-07T18:08:38.375912Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-07T18:08:38.375978Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-07T18:08:38.376394Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 6:27, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T18:08:38.376451Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:8, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T18:08:38.376480Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 4:18, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-07T18:08:38.376523Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T18:08:38.376773Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 132 2026-01-07T18:08:38.376813Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-01-07T18:08:38.389270Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 1 2026-01-07T18:08:38.389353Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T18:08:38.389573Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 133 2026-01-07T18:08:38.389640Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-01-07T18:08:38.401330Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-07T18:08:38.401403Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-07T18:08:38.401498Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 2 2026-01-07T18:08:38.401528Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-07T18:08:38.401731Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: 
nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-07T18:08:38.401780Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-07T18:08:38.401817Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-07T18:08:38.401857Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-07T18:08:38.401888Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-07T18:08:38.401932Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-07T18:08:38.401962Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-07T18:08:38.401990Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-07T18:08:38.402434Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 134 2026-01-07T18:08:38.402475Z node 1 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:8 2026-01-07T18:08:38.402517Z node 1 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 4:18 2026-01-07T18:08:38.402548Z node 1 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 6:27 2026-01-07T18:08:38.402855Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.403219Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.403492Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] 
Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.403667Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.403814Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.403971Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.404101Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" 
Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.404240Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-07T18:08:38.404304Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |98.8%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel_unstable/unittest >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> TFetchRequestTests::CDC [GOOD] >> TFetchRequestTests::SmallBytesRead |98.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |98.8%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |98.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> MediatorTest::WatcherReconnect >> SequenceShardTests::NegativeIncrement [GOOD] >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2026-01-07T18:08:40.429107Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-07T18:08:40.429314Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-07T18:08:40.450350Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-07T18:08:40.454654Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-07T18:08:40.454724Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-07T18:08:40.473182Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-07T18:08:40.475136Z node 1 
:SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2026-01-07T18:08:40.504443Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-07T18:08:40.504826Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-07T18:08:40.504872Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:33: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.504958Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-07T18:08:40.505345Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2026-01-07T18:08:40.507771Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2026-01-07T18:08:40.529153Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-07T18:08:40.529559Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:40.529685Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2026-01-07T18:08:40.547854Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.548153Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2026-01-07T18:08:40.548252Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2026-01-07T18:08:40.568579Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.569000Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-07T18:08:40.569092Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2026-01-07T18:08:40.583679Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.584088Z node 1 
:SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2026-01-07T18:08:40.584178Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2026-01-07T18:08:40.599768Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.600206Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2026-01-07T18:08:40.600267Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2026-01-07T18:08:40.600337Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.600564Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-07T18:08:40.600659Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2026-01-07T18:08:40.613789Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.614253Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-07T18:08:40.614336Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.614408Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.614656Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.614741Z node 1 :SEQUENCESHARD NOTICE: tx_drop_sequence.cpp:43: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.626874Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-07T18:08:40.627221Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.627293Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:33: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.627365Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-07T18:08:40.646290Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-07T18:08:40.646389Z node 1 :SEQUENCESHARD TRACE: 
tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-07T18:08:40.646839Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-07T18:08:40.647511Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-07T18:08:40.647700Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-07T18:08:40.655060Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:40.655133Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:40.655185Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.655415Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-07T18:08:40.655495Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2026-01-07T18:08:40.672536Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.673189Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2026-01-07T18:08:40.673305Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-07T18:08:40.687639Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-07T18:08:40.688082Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-07T18:08:40.688202Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2026-01-07T18:08:40.702460Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:40.703028Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2026-01-07T18:08:40.703122Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-07T18:08:40.728168Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-07T18:08:40.728668Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] 
TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-07T18:08:40.728762Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2026-01-07T18:08:40.740253Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard ... Id: 43] Cache# 0 2026-01-07T18:08:42.124807Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2026-01-07T18:08:42.139791Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:42.140235Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2026-01-07T18:08:42.140289Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:66: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-07T18:08:42.140359Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-07T18:08:42.140586Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.140669Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.158376Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-07T18:08:42.158726Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.158829Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.182131Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-07T18:08:42.182440Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.182516Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-07T18:08:42.199661Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-07T18:08:42.200024Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:42.200074Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:54: 
[sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2026-01-07T18:08:42.200136Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:42.200390Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-07T18:08:42.200474Z node 3 :SEQUENCESHARD NOTICE: tx_freeze_sequence.cpp:68: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-07T18:08:42.213885Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-07T18:08:42.214387Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-07T18:08:42.214538Z node 3 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-07T18:08:42.227449Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-07T18:08:42.227825Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-07T18:08:42.227913Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-07T18:08:42.240960Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-07T18:08:42.241393Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-07T18:08:42.241462Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:48: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2026-01-07T18:08:42.241525Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-07T18:08:42.241795Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:42.241875Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2026-01-07T18:08:42.265492Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:43.031604Z node 4 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-07T18:08:43.031713Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: 
[sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-07T18:08:43.047204Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-07T18:08:43.053854Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-07T18:08:43.053927Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-07T18:08:43.055838Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2026-01-07T18:08:43.055951Z node 4 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2026-01-07T18:08:43.088742Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-07T18:08:43.089065Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:43.089167Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-07T18:08:43.102717Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:43.103041Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:43.103139Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-07T18:08:43.118796Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:43.119126Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-07T18:08:43.119222Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2026-01-07T18:08:43.134703Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:43.135028Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-07T18:08:43.135077Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:43.135139Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 
2026-01-07T18:08:43.135429Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2026-01-07T18:08:43.135519Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-07T18:08:43.150717Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-07T18:08:43.151082Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:43.151179Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-07T18:08:43.166065Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-07T18:08:43.166432Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-07T18:08:43.166534Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-07T18:08:43.178850Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |98.8%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceshard/ut/unittest >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> test_commit.py::TestCommit::test_commit |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/http_api/py3test |98.9%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> MediatorTest::WatcherReconnect [GOOD] >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> MediatorTest::MultipleSteps >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> DataShardReplication::SimpleApplyChanges >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> DescribeSchemaSecretsService::GetNewValue [GOOD] >> DescribeSchemaSecretsService::GetUpdatedValue >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive [GOOD] >> MediatorTest::MultipleSteps [GOOD] >> MediatorTest::WatchesBeforeFirstStep >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> TSequence::CreateTableWithDefaultFromSequence >> TMemoryController::SharedCache [GOOD] >> 
TMemoryController::SharedCache_ConfigLimit >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> ExportS3BufferTest::MinBufferSize [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompression [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] |98.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_export/unittest >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] |98.9%| [TS] {RESULT} ydb/core/tx/datashard/ut_export/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_export/unittest >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 |98.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] |98.9%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test |98.9%| [TS] {BAZEL_UPLOAD} ydb/tests/tools/pq_read/test/py3test >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> TTxDataShardTestInit::TestGetShardStateAfterInitialization ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2026-01-07T18:07:15.298145Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.012370s 2026-01-07T18:07:15.357279Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005643s 2026-01-07T18:07:15.373563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592682850201797517:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:15.377701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:15.509680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:07:15.846848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:16.155845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:16.155971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:16.161288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:16.258079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:16.275662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:07:16.390457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:17.042264Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:07:17.042338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:07:17.042344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:07:17.042422Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:07:18.210980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:18.473476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:07:21.939719Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7592682877531365482:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:21.939787Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:22.045375Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011706s 2026-01-07T18:07:22.049291Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:22.317510Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:22.347548Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:22.409952Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:22.410033Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:22.460316Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:22.677804Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:07:22.677852Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:07:22.677860Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:07:22.677946Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:07:22.843694Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:07:22.923014Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:22.961655Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:23.014086Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:07:28.177277Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008123s 2026-01-07T18:07:28.299114Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7592682906942684256:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:28.305889Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:28.369309Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015500s 2026-01-07T18:07:28.450271Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:28.701259Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:28.765075Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:28.788840Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:28.797378Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:28.806905Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:28.925142Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.014502s 2026-01-07T18:07:28.996182Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:07:28.996204Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:07:28.996211Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:07:28.996287Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:07:29.133791Z node 7 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:07:29.297576Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:07:29.384616Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:07:29.434118Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:07:34.115098Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7592682933040821425:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:07:34.115275Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:07:34.174036Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:07:34.350426Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:07:34.371182Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:07:34.371268Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:07:34.392332Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:07:34.417029Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:07:34.613336Z node ... 
tions 2026-01-07T18:08:21.342662Z node 29 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007427s 2026-01-07T18:08:21.492630Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:08:21.492660Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:08:21.492674Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:08:21.492788Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:22.058320Z node 28 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:22.060958Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:22.243652Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:08:29.933124Z node 32 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.006369s 2026-01-07T18:08:30.072537Z node 31 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7592683172140955582:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:30.072617Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:30.199584Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:30.485592Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:30.674417Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:30.674540Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:30.682628Z node 31 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:30.725972Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:30.813836Z node 33 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.015613s 2026-01-07T18:08:30.933760Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-07T18:08:30.933790Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:08:30.933804Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:08:30.933920Z node 31 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:30.985301Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:31.139444Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:31.455304Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:31.572155Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:08:40.701122Z node 34 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7592683216178178081:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:40.701268Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:40.802631Z node 34 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005245s 2026-01-07T18:08:40.853149Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:40.854853Z node 36 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.008502s 2026-01-07T18:08:41.052352Z node 34 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:41.073119Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:41.078810Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:41.078918Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:41.088146Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:41.205615Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:08:41.205646Z node 34 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:08:41.205661Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:08:41.212404Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:41.212448Z node 35 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.013955s 2026-01-07T18:08:41.554559Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:41.668641Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:41.713641Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:08:41.761289Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:48.871112Z node 37 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7592683252724572407:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:48.871191Z node 37 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:48.986902Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:49.174682Z node 37 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:49.196217Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:49.207141Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:49.207250Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:49.222998Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:49.301812Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:08:49.301832Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:08:49.301841Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-07T18:08:49.301917Z node 37 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:49.450106Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:49.533221Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-07T18:08:49.547413Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:49.969202Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |98.9%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/services/rate_limiter/ut/unittest >> MediatorTest::WatchesBeforeFirstStep [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> MediatorTest::RebootTargetTablets >> TFetchRequestTests::SmallBytesRead [GOOD] >> TFetchRequestTests::EmptyTopic >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TxKeys::ComparePointKeys >> TopicSessionTests::TwoSessionsWithOffsets >> DescribeSchemaSecretsService::GetUpdatedValue [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period |98.9%| [TM] {RESULT} ydb/tests/functional/wardens/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/wardens/py3test >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TDqPqRdReadActorTests::TestReadFromTopic2 >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> 
DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration >> MediatorTest::RebootTargetTablets [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TDqPqRdReadActorTests::SessionError |98.9%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> TDqPqRdReadActorTests::SessionError [GOOD] >> MediatorTest::ResendSubset >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test_example.py::TestExample::test_example >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> TMLPConsumerTests::ReloadPQTablet [GOOD] >> TMLPConsumerTests::AlterConsumer >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> TopicSessionTests::BadDataSessionError >> DescribeSchemaSecretsService::GetUnexistingValue [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue >> TDqPqRdReadActorTests::Backpressure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2026-01-07T18:09:03.451089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:09:03.514645Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:09:03.522816Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:128:2152] 2026-01-07T18:09:03.524495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:03.567655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:111:2141], Recipient [1:128:2152]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:09:03.785389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T18:09:03.785461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:03.814622Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:03.815012Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:03.824351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T18:09:03.824476Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T18:09:03.824555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T18:09:03.826804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:03.827301Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:03.827421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:195:2152] in generation 2 2026-01-07T18:09:04.054241Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:04.102328Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T18:09:04.106368Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:04.106654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-07T18:09:04.106745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T18:09:04.106788Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T18:09:04.106866Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:04.107299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:04.107373Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:04.109788Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T18:09:04.109932Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T18:09:04.110095Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:04.110175Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:04.110227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-07T18:09:04.110316Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T18:09:04.110430Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:04.110575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T18:09:04.110649Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T18:09:04.110705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T18:09:04.110743Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 
2026-01-07T18:09:04.110811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T18:09:04.131989Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:128:2152]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-07T18:09:04.132097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T18:09:04.132246Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-07T18:09:04.132562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-07T18:09:04.132674Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-07T18:09:04.136728Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-07T18:09:04.136868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-07T18:09:04.136915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-07T18:09:04.137001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-07T18:09:04.137064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T18:09:04.137669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-07T18:09:04.137727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-07T18:09:04.137771Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-07T18:09:04.137810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-07T18:09:04.137888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-07T18:09:04.137922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-07T18:09:04.137960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-07T18:09:04.137990Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-07T18:09:04.138021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-07T18:09:04.158051Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-07T18:09:04.158190Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-07T18:09:04.158274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-07T18:09:04.158365Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-07T18:09:04.165923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-07T18:09:04.188379Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:04.188481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:04.188534Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-07T18:09:04.188821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:128:2152]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-07T18:09:04.188869Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-07T18:09:04.189138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-07T18:09:04.189192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-07T18:09:04.189232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-07T18:09:04.189270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-07T18:09:04.193248Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-07T18:09:04.193355Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:04.193697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:128:2152], Recipient [1:128:2152]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:04.193740Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:04.193804Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T18:09:04.193854Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-07T18:09:04.193897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-07T18:09:04.193957Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-07T18:09:04.194003Z node 1 :TX_DATASHARD TRACE: dat ... 
ode 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-07T18:09:09.744073Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:138:2158]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-07T18:09:09.744121Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-07T18:09:09.744163Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-07T18:09:09.744206Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:09.749369Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2026-01-07T18:09:09.749457Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2026-01-07T18:09:09.749572Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T18:09:09.750216Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T18:09:09.750268Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2026-01-07T18:09:09.750310Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T18:09:09.750359Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-07T18:09:09.750390Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2026-01-07T18:09:09.750454Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-07T18:09:09.750508Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-07T18:09:09.750598Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:09.751455Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 138 RawX2: 21474838638 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2026-01-07T18:09:09.751578Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:231:2227], Recipient [5:138:2158]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:233:2228] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T18:09:09.751621Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T18:09:09.751713Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:126:2150], Recipient [5:138:2158]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-07T18:09:09.751742Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-07T18:09:09.751783Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state 
Ready 2026-01-07T18:09:09.751850Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-07T18:09:09.752730Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [5:104:2137], Recipient [5:138:2158]: NActors::TEvents::TEvPoison 2026-01-07T18:09:09.753302Z node 5 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 9437184 2026-01-07T18:09:09.753448Z node 5 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 9437184 2026-01-07T18:09:09.766467Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [5:236:2229], Recipient [5:238:2230]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:09:09.771640Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [5:236:2229], Recipient [5:238:2230]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:09:09.771780Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [5:236:2229], Recipient [5:238:2230]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:09:09.780430Z node 5 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:238:2230] 2026-01-07T18:09:09.780712Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:09.784983Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:704: TxInitSchema.Execute Persist Sys_SubDomainInfo 2026-01-07T18:09:09.823885Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:09.824047Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:09.826078Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T18:09:09.826164Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-07T18:09:09.826223Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T18:09:09.826643Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:09.826847Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:09.826913Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [5:281:2230] in generation 3 2026-01-07T18:09:09.847155Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:09.847266Z node 5 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 9437184 2026-01-07T18:09:09.847346Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-07T18:09:09.847450Z node 5 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2026-01-07T18:09:09.847676Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [5:286:2269] 2026-01-07T18:09:09.847715Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T18:09:09.847759Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-07T18:09:09.847792Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:09.847919Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-07T18:09:09.848032Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-07T18:09:09.848371Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [5:238:2230], Recipient [5:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:09.848436Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:09.848603Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T18:09:09.848687Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T18:09:09.848854Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:238:2230]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-07T18:09:09.848894Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-07T18:09:09.848969Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-07T18:09:09.849018Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:09.849103Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [5:26:2073], Recipient [5:238:2230]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-07T18:09:09.849129Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-07T18:09:09.849165Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-07T18:09:09.849286Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 238 RawX2: 21474838710 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-07T18:09:09.849335Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T18:09:09.849371Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:09.849406Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T18:09:09.849437Z node 5 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T18:09:09.849469Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T18:09:09.849496Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T18:09:09.849551Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T18:09:09.849628Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:284:2267], Recipient [5:238:2230]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:288:2271] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T18:09:09.849663Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-07T18:09:09.849743Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:126:2150], Recipient [5:238:2230]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-07T18:09:09.849770Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-07T18:09:09.849804Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-07T18:09:09.849854Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-07T18:09:09.862663Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [5:284:2267], Recipient [5:238:2230]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:284:2267] ServerId: [5:288:2271] } 2026-01-07T18:09:09.862741Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_keys/unittest |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> MediatorTest::ResendSubset [GOOD] >> MediatorTest::ResendNotSubset >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2026-01-07T18:09:00.372688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:113:2143]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:09:00.440847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:113:2143]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:09:00.449701Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:113:2143] 2026-01-07T18:09:00.452203Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:00.481004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:113:2143]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:09:00.544862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:00.545350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:00.554032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-07T18:09:00.554138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 9437184 2026-01-07T18:09:00.554230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-07T18:09:00.561508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:00.561794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:00.561873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:136:2143] in generation 2 2026-01-07T18:09:00.593819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:00.691602Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-07T18:09:00.693445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:00.693650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:141:2163] 2026-01-07T18:09:00.693695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-07T18:09:00.693767Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-07T18:09:00.693805Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-07T18:09:00.694012Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:113:2143], Recipient [1:113:2143]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:00.698218Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:00.703472Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-07T18:09:00.703603Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-07T18:09:00.703663Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-07T18:09:00.703715Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:00.703758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-07T18:09:00.703807Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-07T18:09:00.703841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-07T18:09:00.703885Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-07T18:09:00.703930Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-07T18:09:00.709325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [1:104:2137], Recipient [1:113:2143]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 104 RawX2: 4294969433 } 2026-01-07T18:09:00.709414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState ... 
waiting for SysViewsRoster update finished 2026-01-07T18:09:05.271248Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:05.429656Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:05.441023Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:05.441999Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:05.442220Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:06.110265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:06.110391Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:06.228759Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767809341281181 != 1767809341281185 2026-01-07T18:09:06.252680Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:06.317359Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:06.434561Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:06.808057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:06.837549Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:06.986252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:07.050987Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-07T18:09:07.051290Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:07.119411Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:07.119571Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:07.122398Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:07.122504Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:07.122586Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:07.123052Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:07.123219Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:07.123345Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-07T18:09:07.137650Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:07.137787Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:09:07.137972Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:07.138085Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-07T18:09:07.138135Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:07.138170Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:09:07.138226Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:07.138763Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:09:07.138879Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:07.139390Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:07.139442Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:07.139486Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:09:07.139561Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:07.139642Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-07T18:09:07.140206Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:09:07.140472Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T18:09:07.140590Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T18:09:07.145602Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09: ... 
planned 0 immediate 0 planned 1 2026-01-07T18:09:12.825917Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-07T18:09:12.826195Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-07T18:09:12.826337Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T18:09:12.827753Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:12.827829Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T18:09:12.828288Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T18:09:12.828711Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:12.830968Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-07T18:09:12.831019Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:12.832110Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-07T18:09:12.832178Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:12.834114Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:12.834175Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:12.834239Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T18:09:12.834311Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T18:09:12.834361Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T18:09:12.834475Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:12.835682Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09:12.838690Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T18:09:12.838909Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T18:09:12.838972Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T18:09:12.858356Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-07T18:09:12.860030Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-07T18:09:12.911053Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:956:2812] 2026-01-07T18:09:12.911357Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:12.920089Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:12.920746Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:12.927461Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:12.927602Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:12.927670Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:12.928964Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:12.930137Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:12.930239Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:971:2812] in generation 2 2026-01-07T18:09:12.957961Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:12.958120Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-01-07T18:09:12.958233Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-07T18:09:12.958357Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T18:09:12.958474Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4210: Resolve path at 72075186224037888: reason# empty path 2026-01-07T18:09:12.958599Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:975:2822] 2026-01-07T18:09:12.958643Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:12.958695Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T18:09:12.958733Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:12.959065Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-07T18:09:12.959363Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-07T18:09:12.960484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 956 RawX2: 12884904700 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-07T18:09:12.960589Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 
72075186224037888 2026-01-07T18:09:12.960696Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:12.962684Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2000 2026-01-07T18:09:12.962752Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:12.963101Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09:12.963271Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:12.963323Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:12.963371Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:09:12.963422Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:12.963708Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-07T18:09:13.072103Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4271: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 
1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-07T18:09:13.072512Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-07T18:09:13.072632Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-07T18:09:13.072902Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:20: TTxStoreTablePath::Execute at 72075186224037888 2026-01-07T18:09:13.082731Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:979:2826], serverId# [3:981:2827], sessionId# [0:0:0] 2026-01-07T18:09:13.105709Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:39: TTxStoreTablePath::Complete at 72075186224037888 |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_init/unittest >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> TTxDataShardBuildFulltextDictScan::BadRequest >> test_example.py::TestExample::test_example [GOOD] >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> TFetchRequestTests::EmptyTopic [GOOD] >> TFetchRequestTests::BadTopicName >> TDataShardRSTest::TestCleanupInRS+UseSink >> MediatorTest::ResendNotSubset [GOOD] >> TopicSessionTests::BadDataSessionError [GOOD] >> MediatorTest::OneCoordinatorResendTxNotLost >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> TopicSessionTests::WrongFieldType >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> DescribeSchemaSecretsService::GetDroppedValue [GOOD] >> DescribeSchemaSecretsService::GetInParallel >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> TMLPConsumerTests::AlterConsumer [GOOD] >> TMLPConsumerTests::RecreateConsumer >> StatisticsScan::RunScanOnShard >> DataShardStats::OneChannelStatsCorrect >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> KqpTpch::Query22 [GOOD] >> TTxDataShardBuildFulltextDictScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: 
contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |98.9%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/kqprun/tests/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:08:15.667679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:15.971930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:08:16.017708Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:16.018464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:16.018544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:16.962751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:16.962876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:17.092662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809291965903 != 1767809291965907 2026-01-07T18:08:17.154922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:17.233246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:17.378815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:08:17.857470Z node 1 :TX_MEDIATOR INFO: mediator__schema.cpp:23: tablet# 72057594047365120 TTxSchema Complete 2026-01-07T18:08:17.858312Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:88: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2026-01-07T18:08:17.859081Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:853:2738] connected 2026-01-07T18:08:17.859235Z node 1 :TX_MEDIATOR NOTICE: mediator_impl.cpp:133: tablet# 72057594047365120 actor# [1:836:2728] HANDLE TEvMediatorConfiguration Version# 1 2026-01-07T18:08:17.859756Z node 1 :TX_MEDIATOR DEBUG: mediator__configure.cpp:77: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2026-01-07T18:08:17.859920Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:64: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2026-01-07T18:08:17.860533Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:859:2743] connected 2026-01-07T18:08:17.860624Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-07T18:08:17.860671Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:175: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:857:2742] bucket# 0 ... 
waiting for watcher to connect (done) 2026-01-07T18:08:17.860915Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-07T18:08:17.861490Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:159: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvWatch 2026-01-07T18:08:17.861537Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:164: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:856:2741] bucket.ActiveActor 2026-01-07T18:08:17.861630Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:380: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:857:2742]} 2026-01-07T18:08:17.861736Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:391: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2026-01-07T18:08:17.872617Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:863:2747] connected 2026-01-07T18:08:17.872779Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-07T18:08:17.872847Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:861:2745] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 951 Coordinator# 72057594046316545 2026-01-07T18:08:17.873259Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2026-01-07T18:08:17.873317Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1000 2026-01-07T18:08:17.873412Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2026-01-07T18:08:17.873623Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... 
waiting for blocked plan step 2026-01-07T18:08:17.895891Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-07T18:08:17.895959Z node 1 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-07T18:08:17.896053Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [1:855:2740] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2026-01-07T18:08:17.896216Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2026-01-07T18:08:17.896263Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 marker# M2 2026-01-07T18:08:17.896311Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND Ev to# [1:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2026-01-07T18:08:17.896385Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1010 2026-01-07T18:08:17.896479Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}} marker# M4 2026-01-07T18:08:17.896701Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-07T18:08:17.899077Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:885:2759] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T18:08:17.899167Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-07T18:08:17.899220Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 ... waiting for blocked plan step (done) ... waiting for no pending commands 2026-01-07T18:08:17.899782Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-07T18:08:17.899847Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:189: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:857:2742] bucket# 0 ... 
waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2026-01-07T18:08:17.900179Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:342: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2026-01-07T18:08:17.900239Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:415: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:861:2745] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2026-01-07T18:08:17.900368Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) ... waiting for SysViewsRoster update finished 2026-01-07T18:08:21.979199Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:21.987898Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:08:21.992361Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:21.992732Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:21.992901Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:22.351778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:22.351897Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:22.392599Z node 2 :CONFIGS_DISPATCHER ERROR: configs_disp ... EDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:901:2767] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 950 Coordinator# 72057594046316546 2026-01-07T18:09:24.847430Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-07T18:09:24.847487Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-07T18:09:24.847566Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-07T18:09:24.847594Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-07T18:09:24.847727Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [12:855:2740] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2026-01-07T18:09:24.847866Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2026-01-07T18:09:24.847916Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 txid# 2 marker# M2 2026-01-07T18:09:24.847976Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2026-01-07T18:09:24.848029Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2026-01-07T18:09:24.848063Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2026-01-07T18:09:24.848104Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:856:2741] bucket.ActiveActor step# 1010 2026-01-07T18:09:24.848228Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-07T18:09:24.848477Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-07T18:09:24.848603Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-07T18:09:24.849503Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:908:2773] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T18:09:24.849578Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-07T18:09:24.849619Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-07T18:09:24.849666Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-07T18:09:24.850111Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:910:2774] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-07T18:09:24.850154Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-07T18:09:24.850183Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-07T18:09:24.850217Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-07T18:09:24.861911Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [12:913:2777] connected 2026-01-07T18:09:24.862132Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-07T18:09:24.862193Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:911:2775] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 951 Coordinator# 72057594046316546 2026-01-07T18:09:24.862482Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-07T18:09:24.862544Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-07T18:09:24.862637Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:223: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:855:2740] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2026-01-07T18:09:24.862762Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:130: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:911:2775] 2026-01-07T18:09:24.862809Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2026-01-07T18:09:24.862872Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2026-01-07T18:09:24.862928Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2026-01-07T18:09:24.862984Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2026-01-07T18:09:24.863084Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-07T18:09:24.863164Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-07T18:09:24.878957Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:905:2771] ServerId: [12:908:2773] } 2026-01-07T18:09:24.896646Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:934:2787] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-07T18:09:24.896758Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-07T18:09:24.896805Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-07T18:09:24.896856Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-07T18:09:24.909720Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:24.948875Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:907:2772] ServerId: [12:910:2774] } 2026-01-07T18:09:24.968132Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:973:2803] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-07T18:09:24.968272Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-07T18:09:24.968321Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-07T18:09:24.968373Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/mediator/ut/unittest |98.9%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo >> TopicSessionTests::WrongFieldType [GOOD] >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> Graph::CreateGraphShard >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test_example.py::TestExample::test_example2 >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/tests/kikimr_tpch/unittest |98.9%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> TabletService_ChangeSchema::Basics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:00.700017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:00.994231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:01.083655Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:01.084284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:01.084336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:01.911166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:01.911296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:02.073172Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809335967177 != 1767809335967181 2026-01-07T18:09:02.122837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:02.188920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:02.387390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:02.829596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:02.845547Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:03.035878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:03.659409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:03.659521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:03.659617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:03.667219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:03.667479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:03.673899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:03.825956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:03.928508Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } ... waiting for SysViewsRoster update finished 2026-01-07T18:09:10.993307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:10.999372Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:11.007185Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:11.007441Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:11.007617Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:11.322223Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:11.322330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:11.348509Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767809347667263 != 1767809347667267 2026-01-07T18:09:11.352088Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:11.397063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:11.483367Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:11.818455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:11.834976Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:11.951043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:12.301973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1031:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:12.302084Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:12.302420Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:12.303178Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1047:2879], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:12.303370Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:12.316433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:12.509979Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1045:2877], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:12.548638Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1104:2917] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } item ... ervice.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:20.309973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:20.326448Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:20.435519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:20.736661Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:20.736768Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:20.749720Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:20.750686Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:20.751059Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:20.763320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:20.932389Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:20.976274Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } ... waiting for SysViewsRoster update finished 2026-01-07T18:09:26.742807Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:26.748747Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:26.752866Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:26.753313Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:26.753371Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:27.009656Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:27.009794Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:27.033231Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767809363266348 != 1767809363266352 2026-01-07T18:09:27.037665Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:27.086544Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:27.160717Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:27.531102Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:27.544669Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:27.659319Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:27.950265Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:27.950441Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:27.950539Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:27.951739Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:27.952119Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:27.964562Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:28.106591Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:28.145272Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:09:28.325773Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1065:2888], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2026-01-07T18:09:28.328822Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZTFmOGQwZTMtZGQwOTY0ZGYtYWNhMjRmMWMtMmFhYjJlZjM=, ActorId: [4:980:2829], ActorState: ExecuteState, LegacyTraceId: 01kectcfwc7zvr3q0m48149a2g, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } } tx_id# trace_id# 2026-01-07T18:09:28.375745Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1087:2904], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2026-01-07T18:09:28.378699Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YzA2ZTcxZWMtMmY2OGU0MTAtNDQ5YzRiYjAtNDYzMTYyYTg=, ActorId: [4:1079:2896], ActorState: ExecuteState, LegacyTraceId: 01kectcg8d8a1871gha09kr3sa, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } } tx_id# trace_id# |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_sequence/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] Test command err: 2026-01-07T18:07:11.644038Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-07T18:07:11.644194Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-07T18:07:11.644261Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-07T18:07:11.644651Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-07T18:07:11.644703Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-07T18:07:11.644759Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-07T18:07:11.645972Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-07T18:07:11.666499Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-07T18:07:11.666588Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-07T18:07:11.666628Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-07T18:07:11.678001Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-07T18:07:11.678102Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-07T18:07:11.679109Z node 1 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-07T18:07:11.679382Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.679434Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-07T18:07:11.679481Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.679517Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-07T18:07:11.679590Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.679631Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-07T18:07:11.679663Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-07T18:07:11.679787Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-07T18:07:11.679827Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-07T18:07:11.680027Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-07T18:07:11.680072Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2026-01-07T18:07:11.680138Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-07T18:07:11.680186Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2026-01-07T18:07:11.680230Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-07T18:07:11.680314Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-07T18:07:11.680351Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-07T18:07:11.680405Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-07T18:07:11.785765Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 
2026-01-07T18:07:11.785908Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-07T18:07:11.785959Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-07T18:07:11.786267Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-07T18:07:11.786305Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-07T18:07:11.786338Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-07T18:07:11.786408Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-07T18:07:11.786572Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-07T18:07:11.786608Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-07T18:07:11.786637Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-07T18:07:11.786991Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-07T18:07:11.787070Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-07T18:07:11.787135Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-07T18:07:11.787303Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.787339Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-07T18:07:11.787378Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.787488Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-07T18:07:11.787531Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:11.787565Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-07T18:07:11.787598Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 
'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-07T18:07:11.787683Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-07T18:07:11.787720Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-07T18:07:11.787858Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-07T18:07:11.787914Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2026-01-07T18:07:11.787951Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-07T18:07:11.787984Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2026-01-07T18:07:11.788036Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-07T18:07:11.788096Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-07T18:07:11.788135Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-07T18:07:11.788176Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-07T18:07:11.914570Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-07T18:07:11.914698Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-07T18:07:11.914744Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Wa ... 
-01-07T18:07:12.022571Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2026-01-07T18:07:12.022612Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:12.022675Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2026-01-07T18:07:12.022748Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-07T18:07:12.022820Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-07T18:07:12.022856Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-07T18:07:12.023017Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2026-01-07T18:07:12.023059Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2026-01-07T18:07:12.023100Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2026-01-07T18:07:12.023157Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2026-01-07T18:07:12.023205Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-07T18:07:12.023299Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2026-01-07T18:07:12.023341Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:3] Checkpoint completed 2026-01-07T18:07:12.023384Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-07T18:07:12.023447Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-07T18:07:12.023534Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2026-01-07T18:07:12.023569Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-07T18:07:12.023657Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:12.023692Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task 
state saved, need 2 more acks 2026-01-07T18:07:12.023758Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:12.023796Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2026-01-07T18:07:12.023844Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:12.023879Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2026-01-07T18:07:12.023908Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-07T18:07:12.023977Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-07T18:07:12.024049Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-07T18:07:12.024195Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2026-01-07T18:07:12.024234Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2026-01-07T18:07:12.024276Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2026-01-07T18:07:12.024315Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2026-01-07T18:07:12.024345Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-07T18:07:12.024515Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2026-01-07T18:07:12.024550Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:4] Checkpoint completed 2026-01-07T18:07:12.124775Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-07T18:07:12.124953Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-07T18:07:12.125002Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-07T18:07:12.125290Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-07T18:07:12.125327Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-07T18:07:12.125362Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-07T18:07:12.125441Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-07T18:07:12.125625Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-07T18:07:12.125661Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-07T18:07:12.125695Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-07T18:07:12.125857Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-07T18:07:12.125897Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-07T18:07:12.125957Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-07T18:07:12.126137Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-07T18:07:12.126178Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-07T18:07:12.126241Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-07T18:07:12.126278Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-07T18:07:12.126330Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-07T18:07:12.126370Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-07T18:07:12.126403Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2026-01-07T18:07:12.126476Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:590: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2026-01-07T18:07:12.126512Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:596: [my-graph-id.42] [42:1] Checkpoint aborted 2026-01-07T18:07:12.126558Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-07T18:07:12.126595Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-07T18:07:12.126660Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2026-01-07T18:07:12.126694Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-07T18:07:12.225994Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-07T18:07:12.226127Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 0, ActorsToNotify count: 1, ActorsToWaitFor count: 2 2026-01-07T18:07:12.226165Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:107: [my-graph-id.42] No ingress tasks, coordinator was disabled 2026-01-07T18:07:12.226202Z node 6 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess |98.9%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/checkpointing/ut/unittest >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> StatisticsScan::RunScanOnShard [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> DescribeSchemaSecretsService::GetInParallel [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:26.153542Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:26.338460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:26.384493Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:26.385314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:26.385397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:27.031827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:27.031973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:27.131138Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809362357144 != 1767809362357148 2026-01-07T18:09:27.171840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:27.226398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:27.425132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:27.785009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:27.798964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:27.942079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:28.427442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:28.427607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:28.428120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:28.431368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:28.431731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:28.441230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:28.594108Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:28.683890Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_column_stats/unittest >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> test.py::TestViewer::test_whoami_root >> TMLPConsumerTests::RecreateConsumer [GOOD] >> TMLPConsumerTests::ReloadPQTabletAfterAlterConsumer >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf |98.9%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> test_example.py::TestExample::test_example2 [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed |98.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.9%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/client/minikql_compile/ut/unittest >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants >> TDqSolomonWriteActorTest::TestWriteFormat >> TopicSessionTests::ReadNonExistentTopic >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemoryController::ResourceBroker_ConfigCS >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextIndexScan::BadRequest >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_canonical_requests.py::Test::test_cluster_get_set >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> Vacuum::Vacuum >> TCreateAndDropViewTest::CheckCreatedView >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TTxDataShardBuildFulltextIndexScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextIndexScan::Build >> 
TListAllTopicsTests::PlainList >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_example.py::TestExample::test_linked_with_testcase >> TopicSessionTests::SlowSession >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> TFetchRequestTests::CheckAccess [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants [GOOD] >> DescribeSchemaSecretsService::GroupGrants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:08:53.349338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:53.618533Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005001s 2026-01-07T18:08:53.634878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:08:53.728399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:53.729213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:53.729271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:54.653125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:54.653249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:54.811073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809330109282 != 1767809330109286 2026-01-07T18:08:54.841348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:54.906965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:55.079532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:08:55.590649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:55.604514Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:55.761193Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T18:08:55.761286Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T18:08:55.765284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T18:08:55.913857Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T18:08:55.913971Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T18:08:55.915760Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2026-01-07T18:08:55.915916Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T18:08:55.916298Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T18:08:55.916445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T18:08:55.916633Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T18:08:55.916901Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T18:08:55.924268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:55.925611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T18:08:55.925680Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T18:08:56.012971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:08:56.014068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:08:56.015597Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T18:08:56.015949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:08:56.102469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:08:56.103199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:08:56.103359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:08:56.108858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:08:56.109027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:08:56.109090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:08:56.114457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:08:56.114679Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:08:56.114786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T18:08:56.129578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:08:56.230712Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:08:56.237296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:08:56.237566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T18:08:56.237636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:08:56.237693Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:08:56.237730Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:08:56.237955Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:56.238005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:08:56.244326Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:08:56.244451Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:08:56.244527Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:08:56.244582Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:08:56.244708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T18:08:56.244762Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T18:08:56.244791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T18:08:56.244824Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:08:56.244884Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:08:56.245033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:08:56.245092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:08:56.245143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T18:08:56.245579Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T18:08:56.245624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T18:08:56.245719Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:08:56.246079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T18:08:56.246141Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.c ... :49.167225Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-07T18:09:49.167274Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T18:09:49.167300Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T18:09:49.167324Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T18:09:49.167350Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-07T18:09:49.167478Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T18:09:49.167808Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T18:09:49.167869Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-07T18:09:49.167923Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1068:2888], 0} after executionsCount# 1 2026-01-07T18:09:49.167981Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1068:2888], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T18:09:49.168063Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1068:2888], 0} finished in read 2026-01-07T18:09:49.168156Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-07T18:09:49.168186Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T18:09:49.168221Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T18:09:49.168266Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-07T18:09:49.168315Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution 
status for [0:2] at 72075186224037888 is Executed 2026-01-07T18:09:49.168337Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T18:09:49.168388Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-07T18:09:49.168430Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T18:09:49.168547Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T18:09:49.168781Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [8:70:2117], Recipient [8:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_SUBSCRIBED 2026-01-07T18:09:49.174348Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1068:2888], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T18:09:49.174429Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2026-01-07T18:09:49.176591Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [8:1072:2893], Recipient [8:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:49.176657Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:49.176720Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [8:1071:2892], serverId# [8:1072:2893], sessionId# [0:0:0] 2026-01-07T18:09:49.180089Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549570, Sender [8:1070:2891], Recipient [8:886:2765]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2026-01-07T18:09:49.180300Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2026-01-07T18:09:49.180432Z node 8 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715660 marked broken at v{min} 2026-01-07T18:09:49.191570Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-07T18:09:49.192449Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270977, Sender [8:26:2073], Recipient [8:886:2765]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2501} 2026-01-07T18:09:49.192502Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2026-01-07T18:09:49.192548Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-01-07T18:09:49.192605Z node 8 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:49.435642Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [8:1094:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-07T18:09:49.435840Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-07T18:09:49.435933Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-07T18:09:49.436035Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T18:09:49.436079Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-07T18:09:49.436122Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T18:09:49.436159Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T18:09:49.436208Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-07T18:09:49.436246Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T18:09:49.436272Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T18:09:49.436296Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T18:09:49.436318Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-07T18:09:49.436439Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-07T18:09:49.436727Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-07T18:09:49.436787Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-07T18:09:49.436842Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1094:2910], 0} after executionsCount# 1 2026-01-07T18:09:49.436907Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1094:2910], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, 
firstUnprocessed# 0 2026-01-07T18:09:49.437030Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1094:2910], 0} finished in read 2026-01-07T18:09:49.437113Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T18:09:49.437141Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T18:09:49.437166Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T18:09:49.437193Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-07T18:09:49.437243Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-07T18:09:49.437263Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T18:09:49.437287Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-07T18:09:49.437338Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-07T18:09:49.437464Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T18:09:49.438516Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1094:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T18:09:49.438567Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_replication/unittest >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> TMemoryController::ResourceBroker_ConfigCS [GOOD] >> TMemoryController::GroupedMemoryLimiter_ConfigCS >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/fetcher/ut/unittest >> TFetchRequestTests::CheckAccess [GOOD] Test command err: 2026-01-07T18:08:12.265212Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683096465573621:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:12.265288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:12.436171Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:08:13.033386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:13.033476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:13.055305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:13.109586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:13.135740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:13.258065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:13.283958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:14.000048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/002606/r3tmp/yandexI8UgXi.tmp 2026-01-07T18:08:14.000079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/002606/r3tmp/yandexI8UgXi.tmp 2026-01-07T18:08:14.003384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/002606/r3tmp/yandexI8UgXi.tmp 2026-01-07T18:08:14.003514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:14.692194Z INFO: TTestServer started on Port 26979 GrpcPort 22656 PQClient connected to localhost:22656 2026-01-07T18:08:15.171989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:15.189606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T18:08:15.201745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T18:08:15.332911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:17.260006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683117940410921:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:17.260036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683117940410934:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:17.260119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:17.260910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683117940410937:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:17.261020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:17.263086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683096465573621:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:17.263160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:08:17.289224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:08:17.308571Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592683117940410936:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T18:08:17.414760Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683117940411005:2650] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:08:18.360819Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592683117940411013:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T18:08:18.382197Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTAzODU5ZDQtNzk2NWRjMWQtNWRlODIzNGUtM2JkOGZkNGU=, ActorId: [1:7592683117940410919:2331], ActorState: ExecuteState, LegacyTraceId: 01kectaatd0ednkrzcf006kwqb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T18:08:18.395695Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T18:08:18.548331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:18.622878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:18.803110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592683126530345911:2840] === CheckClustersList. 
Ok 2026-01-07T18:08:26.072937Z :HappyWay INFO: TTopicSdkTestSetup started 2026-01-07T18:08:26.137667Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T18:08:26.208615Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592683156595117192:2941] connected; active server actors: 1 2026-01-07T18:08:26.213081Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T18:08:26.215079Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T18:08:26.218714Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T18:08:26.233004Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T18:08:26.257102Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T18:08:26.258953Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-07T18:08:26.259073Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T18:08:26.259127Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01 ... : [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.593575Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.593587Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.593595Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T18:09:47.598176Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037895][Partition][2][StateIdle] No data for blobs compaction 2026-01-07T18:09:47.601602Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-07T18:09:47.601642Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.601662Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.601685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.601704Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-07T18:09:47.601762Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-07T18:09:47.601776Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T18:09:47.601786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.601797Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.601805Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-07T18:09:47.613996Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][3][StateIdle] No data for blobs compaction 2026-01-07T18:09:47.617691Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037897][Partition][4][StateIdle] No data for blobs compaction 2026-01-07T18:09:47.631682Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037898][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T18:09:47.693842Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-07T18:09:47.693883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.693899Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.693919Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.693936Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2026-01-07T18:09:47.693987Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:09:47.693998Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.694006Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.694018Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.694027Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-07T18:09:47.694054Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:09:47.694062Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.694071Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.694083Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.694089Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T18:09:47.694125Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-07T18:09:47.694134Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-07T18:09:47.694142Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.694152Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.694159Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-07T18:09:47.702019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-07T18:09:47.702059Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.702073Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.702094Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.702108Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-07T18:09:47.702172Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-07T18:09:47.702181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.702191Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.702202Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.702211Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-07T18:09:47.794225Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-07T18:09:47.794273Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794296Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.794320Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794339Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2026-01-07T18:09:47.794419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:09:47.794431Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794442Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.794456Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794466Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] 
Try persist 2026-01-07T18:09:47.794495Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:09:47.794508Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794517Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.794532Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794542Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T18:09:47.794569Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-07T18:09:47.794580Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794589Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.794601Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.794612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-07T18:09:47.802585Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-07T18:09:47.802627Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.802644Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.802667Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.802683Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-07T18:09:47.802742Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-07T18:09:47.802753Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.802763Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-07T18:09:47.802783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:09:47.802795Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist |98.9%| [TM] {RESULT} ydb/core/persqueue/public/fetcher/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/fetcher/ut/unittest >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> GraphShard::NormalizeAndDownsample1 [GOOD] >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> 
GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> TIndexProcesorTests::TestCreateIndexProcessor >> TTxDataShardBuildFulltextIndexScan::Build [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithData >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> BulkUpsert::BulkUpsert [GOOD] >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> Graph::LocalBordersOnGet [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> NodeWardenDsProxyConfigRetrieval::Disconnect >> GraphShard::CreateGraphShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:130:2058] recipient: [1:113:2143] 2026-01-07T18:09:30.282209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T18:09:30.282378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T18:09:30.282421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T18:09:30.282460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T18:09:30.282501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T18:09:30.282527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T18:09:30.282604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T18:09:30.282693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T18:09:30.283564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T18:09:30.286596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T18:09:30.422961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T18:09:30.423039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:30.441315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T18:09:30.441823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T18:09:30.447077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T18:09:30.466493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T18:09:30.466963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T18:09:30.474453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T18:09:30.480113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T18:09:30.508448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T18:09:30.513117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T18:09:30.533423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T18:09:30.533516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T18:09:30.533635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T18:09:30.533679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T18:09:30.533803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T18:09:30.544341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T18:09:30.832815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.835935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:30.836557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7T18:09:59.657773Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.657804Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.657872Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2026-01-07T18:09:59.657895Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.657950Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.657987Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.658093Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2026-01-07T18:09:59.658119Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.658147Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.658180Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.658277Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2026-01-07T18:09:59.658299Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.658326Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.658356Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.658420Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2026-01-07T18:09:59.658446Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.658496Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.658534Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.658638Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2026-01-07T18:09:59.658665Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.658690Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.658721Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.658807Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2026-01-07T18:09:59.658830Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.658875Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.658917Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.665230Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2026-01-07T18:09:59.665309Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.665386Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.665470Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.665717Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2026-01-07T18:09:59.665754Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.665793Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.665841Z node 6 :GRAPH TRACE: 
backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.665945Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2026-01-07T18:09:59.665976Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.682862Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.683033Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.683346Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2026-01-07T18:09:59.683385Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.683454Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.683503Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.683631Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2026-01-07T18:09:59.683662Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-07T18:09:59.683693Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-07T18:09:59.683734Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-07T18:09:59.683803Z node 6 :GRAPH TRACE: shard_impl.cpp:226: SHARD Handle TEvGraph::TEvGetMetrics from [6:1019:2950] 2026-01-07T18:09:59.683895Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:20: SHARD TTxGetMetrics::Execute 2026-01-07T18:09:59.683947Z node 6 :GRAPH DEBUG: backends.cpp:352: DB Querying from 0 to 119 2026-01-07T18:09:59.699117Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699230Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699261Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699291Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699317Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699346Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699372Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699398Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699436Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699463Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699490Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699515Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699542Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699569Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699593Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699619Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699643Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699670Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 
2026-01-07T18:09:59.699694Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699718Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699742Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699768Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699793Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699817Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699842Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699868Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699895Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699919Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699944Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699968Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.699994Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700022Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700049Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700080Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700105Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700130Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700156Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700181Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700204Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700227Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700248Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700272Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700340Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700374Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700399Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700420Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700444Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700467Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700490Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700513Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700536Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700559Z node 6 :GRAPH DEBUG: 
tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700584Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700607Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700629Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700652Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700676Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700700Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700720Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700742Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-07T18:09:59.700776Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:25: SHARD TTxGetMetric::Complete 2026-01-07T18:09:59.700825Z node 6 :GRAPH TRACE: tx_get_metrics.cpp:26: SHARD TxGetMetrics returned 60 points for request 3 2026-01-07T18:09:59.701045Z node 6 :GRAPH TRACE: service_impl.cpp:201: SVC TEvMetricsResult 3 2026-01-07T18:09:59.701103Z node 6 :GRAPH TRACE: service_impl.cpp:204: SVC TEvMetricsResult found request 3 resending to [6:1020:2951] |98.9%| [TM] {RESULT} ydb/core/graph/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/graph/ut/unittest >> Vacuum::Vacuum [GOOD] >> Vacuum::VacuumWithoutCompaction >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> TopicSessionTests::SlowSession [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-07T18:09:57.108686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-07T18:09:57.108819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T18:09:57.108873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-07T18:09:57.108906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-07T18:09:57.108960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-07T18:09:57.109003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-07T18:09:57.109069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-07T18:09:57.109139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-07T18:09:57.109993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-07T18:09:57.117772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-07T18:09:57.430677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-07T18:09:57.430740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:57.464326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-07T18:09:57.464804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-07T18:09:57.474758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-07T18:09:57.519540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-07T18:09:57.520039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-07T18:09:57.527710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-07T18:09:57.533452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-07T18:09:57.552792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-07T18:09:57.557255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-07T18:09:57.585259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T18:09:57.585387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T18:09:57.585537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-07T18:09:57.585593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-07T18:09:57.585711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-07T18:09:57.589033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-07T18:09:57.910127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.916675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.916954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.917972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.918061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-07T18:09:57.918159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2026-01-07T18:10:00.715743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 128 -> 240 2026-01-07T18:10:00.715831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2026-01-07T18:10:00.715962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-07T18:10:00.716103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[1:824:2791], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-07T18:10:00.717757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186234409546, cookie: 281474976720658 2026-01-07T18:10:00.717907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186234409546, cookie: 281474976720658 2026-01-07T18:10:00.717945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186234409546, txId: 281474976720658 2026-01-07T18:10:00.717982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186234409546, txId: 281474976720658, pathId: [OwnerId: 72075186234409546, LocalPathId: 1], version: 7 2026-01-07T18:10:00.718050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186234409546, LocalPathId: 1] was 7 2026-01-07T18:10:00.718140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720658, ready parts: 0/1, is published: true 2026-01-07T18:10:00.719378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2026-01-07T18:10:00.719490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2026-01-07T18:10:00.721894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720658 FAKE_COORDINATOR: Erasing txId 102 2026-01-07T18:10:00.722332Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-07T18:10:00.722375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-07T18:10:00.722591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-07T18:10:00.722651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-07T18:10:00.723023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-07T18:10:00.723078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2026-01-07T18:10:00.723132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 240 -> 240 2026-01-07T18:10:00.723927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T18:10:00.724024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-07T18:10:00.724059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-07T18:10:00.724092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-07T18:10:00.724134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-07T18:10:00.724213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2026-01-07T18:10:00.727585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-07T18:10:00.727649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:1 ProgressState 2026-01-07T18:10:00.727731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-07T18:10:00.727781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-07T18:10:00.727838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-07T18:10:00.727871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready 
parts: 2/2 2026-01-07T18:10:00.727903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2026-01-07T18:10:00.727939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-07T18:10:00.727990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-07T18:10:00.728044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-07T18:10:00.728220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-07T18:10:00.728271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-07T18:10:00.728302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-07T18:10:00.728437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-07T18:10:00.728948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-07T18:10:00.730787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-07T18:10:00.730840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-07T18:10:00.731290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-07T18:10:00.731390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-07T18:10:00.731431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1029:2961] TestWaitNotification: OK eventTxId 102 2026-01-07T18:10:00.741569Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-07T18:10:00.741946Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/db1" took 428us result status StatusSuccess 2026-01-07T18:10:00.745405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |99.0%| [TS] {BAZEL_UPLOAD} ydb/core/graph/shard/ut/unittest |99.0%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> TTxDataShardBuildFulltextIndexScan::BuildWithData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData >> TMemoryController::GroupedMemoryLimiter_ConfigCS [GOOD] >> TMemoryController::ColumnShardCaches_Config |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] |99.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> QuoterWithKesusTest::AllocationStatistics [FAIL] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> TMLPConsumerTests::ReloadPQTabletAfterAlterConsumer [GOOD] >> TMLPConsumerTests::HtmlApp_Success |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> DescribeSchemaSecretsService::GroupGrants [GOOD] >> DescribeSchemaSecretsService::BatchRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2026-01-07T18:10:01.982983Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-07T18:10:02.071311Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskGuid: 4086510566281633866 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4086510566281633866 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4086510566281633866 } } } } AvailabilityDomains: 0 } 2026-01-07T18:10:02.097086Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-07T18:10:02.153736Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-07T18:10:02.189206Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 4086510566281633866 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T18:10:02.190667Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 4086510566281633866 2026-01-07T18:10:02.190746Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-07T18:10:02.197139Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2076] ControllerId# 72057594037932033 2026-01-07T18:10:02.197201Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T18:10:02.198275Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-07T18:10:02.198472Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-07T18:10:02.281038Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-07T18:10:02.309003Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-07T18:10:02.406059Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-07T18:10:02.406189Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2026-01-07T18:10:02.406230Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-07T18:10:02.425115Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-07T18:10:02.459128Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-07T18:10:02.459205Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-07T18:10:02.473103Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-07T18:10:02.486185Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\201\302\0130L!\245\337h\"mcxC\227\342\022\336\344\205" } 2026-01-07T18:10:02.517246Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-07T18:10:02.517325Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-07T18:10:02.592308Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskGuid: 4086510566281633866 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4086510566281633866 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 4086510566281633866 } } } } AvailabilityDomains: 0 } 2026-01-07T18:10:02.592500Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T18:10:02.592720Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-07T18:10:02.737618Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-07T18:10:02.801391Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-07T18:10:02.880989Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-07T18:10:02.930307Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-07T18:10:02.933557Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-07T18:10:02.934319Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:10:02.938170Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-07T18:10:02.938910Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-07T18:10:02.938962Z 
node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-07T18:10:02.939213Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-07T18:10:02.991340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-07T18:10:02.993587Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-07T18:10:02.995728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-07T18:10:02.995872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T18:10:02.995975Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-07T18:10:02.997788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T18:10:03.071451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-07T18:10:03.071577Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T18:10:03.088234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-07T18:10:03.088369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T18:10:03.088437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-07T18:10:03.088567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T18:10:03.088691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-07T18:10:03.088749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T18:10:03.088801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-07T18:10:03.088890Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T18:10:03.107833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-07T18:10:03.108017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T18:10:03.124313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-07T18:10:03.124533Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-07T18:10:03.127818Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-07T18:10:03.127889Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-07T18:10:03.152502Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-07T18:10:03.158877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-07T18:10:03.159422Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:92:2123] SessionId# [0:0:0] Cookie# 0 Pipe connected clientId# [1:29:2076] 2026-01-07T18:10:03.159739Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2026-01-07T18:10:03.160189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4086510566281633866 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2026-01-07T18:10:03.160342Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2026-01-07T18:10:03.164589Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3t ... 
okie# 0 === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:29:2076] 2026-01-07T18:10:03.235996Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2026-01-07T18:10:03.236084Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2160] ControllerId# 72057594037932033 2026-01-07T18:10:03.236113Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-07T18:10:03.236562Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:143} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2026-01-07T18:10:03.236615Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 2147483648 HasGroupInfo# false GroupInfoGeneration# 2026-01-07T18:10:03.236649Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:285} RequestGroupConfig GroupId# 2147483648 2026-01-07T18:10:03.236894Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 2026-01-07T18:10:03.237007Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-07T18:10:03.237086Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-07T18:10:03.246451Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 Pipe connected clientId# [1:139:2160] 2026-01-07T18:10:03.246716Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2160] ServerId# [1:150:2169] TabletId# 72057594037932033 PipeClientId# [1:139:2160] 2026-01-07T18:10:03.246921Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 4086510566281633866 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2026-01-07T18:10:03.247007Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2026-01-07T18:10:03.247332Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2026-01-07T18:10:03.247503Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:150:2169] RecipientRewrite# [1:92:2123] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2026-01-07T18:10:03.247559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2026-01-07T18:10:03.247691Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2026-01-07T18:10:03.247854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 
2026-01-07T18:10:03.282308Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskGuid: 4086510566281633866 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "e4b496e7-b9ca5fab-18e4503b-ba6b6184" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2026-01-07T18:10:03.282535Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskGuid: 4086510566281633866 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "e4b496e7-b9ca5fab-18e4503b-ba6b6184" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2026-01-07T18:10:03.282721Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/ec9d/0032b5/r3tmp/tmp9CVo6X/static.dat" PDiskGuid: 4086510566281633866 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-07T18:10:03.282903Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 
OldSlotSizeInUnits# 0 2026-01-07T18:10:03.282976Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T18:10:03.283052Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 4086510566281633866 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-07T18:10:03.283918Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 4086510566281633866 2026-01-07T18:10:03.290299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2026-01-07T18:10:03.293860Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-07T18:10:03.293987Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-07T18:10:03.294093Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 4086510566281633866 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-07T18:10:03.294232Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T18:10:03.294288Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-07T18:10:03.294703Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 4086510566281633866 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2026-01-07T18:10:03.295432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2026-01-07T18:10:03.301938Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) 
Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2026-01-07T18:10:03.307168Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-07T18:10:03.308150Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 4086510566281633866 Status: REPLICATING OnlyPhantomsRemain: false } } 2026-01-07T18:10:03.308688Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-07T18:10:03.308888Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 4086510566281633866 Status: READY OnlyPhantomsRemain: false } } 2026-01-07T18:10:03.994225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34189869056 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17041457152 State: Normal SlotCount: 2 SlotSizeInUnits: 0 PDiskUsage: 0.10449320794148381 } } |99.0%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> Vacuum::VacuumWithoutCompaction [GOOD] >> Vacuum::MultipleVacuums >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TMemoryController::ColumnShardCaches_Config [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> 
TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> ConfigGRPCService::ReplaceConfig >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0B ... waiting for SysViewsRoster update finished 2026-01-07T18:08:01.725805Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2026-01-07T18:08:01.728196Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2026-01-07T18:08:01.728336Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 12.5MiB Min: 12.5MiB Max: 12.5MiB 2026-01-07T18:08:01.729395Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2026-01-07T18:08:01.729499Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 60MiB queue_cs_general: 7.5MiB queue_cs_indexation: 2.5MiB queue_cs_normalizer: 7.5MiB queue_cs_ttl: 2.5MiB queue_kqp_resource_manager: 40MiB 2026-01-07T18:08:01.735801Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:161: Bootstrapped with config HardLimitBytes: 209715200 2026-01-07T18:08:01.753177Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-07T18:08:01.755405Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 33554432 2026-01-07T18:08:05.023784Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer SharedCache [1:20:2067] registered 2026-01-07T18:08:05.026278Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 2621440 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 2621440 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2026-01-07T18:08:05.027906Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues 
{ Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: 
"queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 62914560 } 2026-01-07T18:08:05.029027Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-07T18:08:05.029439Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:238: Register memory consumer 2026-01-07T18:08:05.029528Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-07T18:08:05.034114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:08:05.045687Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesBlobCache [1:21:2068] registered 2026-01-07T18:08:05.071259Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDataAccessorCache [1:22:2069] registered 2026-01-07T18:08:05.240612Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesColumnDataCache [1:23:2070] registered 2026-01-07T18:08:05.245505Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesScanGroupedMemory [1:51:2098] registered 2026-01-07T18:08:05.245709Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-07T18:08:05.246080Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesCompGroupedMemory [1:52:2099] registered 2026-01-07T18:08:05.252187Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDeduplicationGroupedMemory [1:53:2100] registered 2026-01-07T18:08:05.252470Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-07T18:08:05.329852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:08:05.335842Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:320:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:08:05.336093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:08:05.487940Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:428:2393] 1 registered 2026-01-07T18:08:05.500875Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2395] 0 registered 2026-01-07T18:08:05.507023Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2395] 2 registered 2026-01-07T18:08:05.507376Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2395] 4 registered 2026-01-07T18:08:05.507538Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2395] 5 registered 2026-01-07T18:08:05.511210Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:451:2397] 1 registered 2026-01-07T18:08:05.515053Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:451:2397] 2 registered 2026-01-07T18:08:05.562757Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 1 registered 2026-01-07T18:08:05.614359Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 2 registered 2026-01-07T18:08:05.614866Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 3 registered 2026-01-07T18:08:05.615855Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 4 registered 2026-01-07T18:08:05.616160Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 5 registered 2026-01-07T18:08:05.616406Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 6 registered 2026-01-07T18:08:05.616477Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 7 registered 2026-01-07T18:08:05.616563Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 8 registered 2026-01-07T18:08:05.616648Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 9 registered 2026-01-07T18:08:05.618028Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 10 registered 2026-01-07T18:08:05.626771Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 11 registered 2026-01-07T18:08:05.627126Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 12 registered 2026-01-07T18:08:05.654984Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 13 registered 2026-01-07T18:08:05.655471Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 14 registered 2026-01-07T18:08:05.656433Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2399] 15 regist ... 
in: 50MiB Max: 50MiB 2026-01-07T18:10:09.919421Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-07T18:10:09.919452Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-07T18:10:09.919483Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-07T18:10:09.919515Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 50MiB Min: 50MiB Max: 50MiB 2026-01-07T18:10:09.919547Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 31.3MiB Min: 31.3MiB Max: 31.3MiB 2026-01-07T18:10:09.919611Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 100MiB 2026-01-07T18:10:09.919725Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 236MiB 2026-01-07T18:10:09.919761Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-07T18:10:10.130111Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-07T18:10:10.130789Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-07T18:10:10.130860Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-07T18:10:10.130901Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.130940Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.130971Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.131001Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.131030Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.131067Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.131097Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 
125MiB Min: 125MiB Max: 125MiB 2026-01-07T18:10:10.131149Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-07T18:10:10.131243Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 600MiB queue_cs_general: 75MiB queue_cs_indexation: 25MiB queue_cs_normalizer: 75MiB queue_cs_ttl: 25MiB queue_kqp_resource_manager: 400MiB 2026-01-07T18:10:10.131546Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 26214400 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 26214400 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 419430400 } } ResourceLimit { Memory: 629145600 } 2026-01-07T18:10:10.132504Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 419430400 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: 
"queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 629145600 } 2026-01-07T18:10:10.134754Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-07T18:10:10.135208Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-07T18:10:10.135279Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-07T18:10:10.136367Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-07T18:10:10.282227Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-07T18:10:10.282838Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-07T18:10:10.282896Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-07T18:10:10.282927Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.282966Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.283017Z node 12 :MEMORY_CONTROLLER 
INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.283052Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.283086Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-07T18:10:10.283121Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-07T18:10:10.283156Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 125MiB Min: 125MiB Max: 125MiB 2026-01-07T18:10:10.283204Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-07T18:10:10.283322Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-07T18:10:10.283361Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/memory_controller/ut/unittest |99.0%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/example/py3test |99.0%| [TM] {RESULT} ydb/tests/example/py3test >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> Vacuum::MultipleVacuums [GOOD] >> Vacuum::MultipleVacuumsWithOldGenerations >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TMLPConsumerTests::HtmlApp_Success [GOOD] >> TMLPConsumerTests::HtmlApp_BadConsumer |99.0%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/solomon/py3test |99.0%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> DescribeSchemaSecretsService::BatchRequest [GOOD] >> DescribeSchemaSecretsService::BigBatchRequest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/dstool/py3test >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/dstool/py3test |99.0%| [TM] {RESULT} ydb/tests/functional/dstool/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> test.py::TestViewer::test_whoami_root [GOOD] >> test.py::TestViewer::test_whoami_database >> ReadUpdateWrite::Load >> test.py::TestViewer::test_whoami_database [GOOD] >> test.py::TestViewer::test_whoami_viewer [GOOD] >> test.py::TestViewer::test_whoami_monitoring |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test.py::TestViewer::test_whoami_monitoring [GOOD] >> test.py::TestViewer::test_counter >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig >> test.py::TestViewer::test_counter [GOOD] >> test.py::TestViewer::test_viewer_nodelist >> test.py::TestViewer::test_viewer_nodelist [GOOD] >> test.py::TestViewer::test_viewer_nodes >> test.py::TestViewer::test_viewer_nodes [GOOD] >> test.py::TestViewer::test_viewer_nodes_all >> test.py::TestViewer::test_viewer_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database_filter_node_id >> test.py::TestViewer::test_viewer_storage_nodes_no_database_filter_node_id [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance [GOOD] >> TTxDataShardBuildIndexScan::BadRequest >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> test.py::TestViewer::test_viewer_storage_nodes [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_nodes_group >> test.py::TestViewer::test_viewer_nodes_group [GOOD] >> test.py::TestViewer::test_storage_groups [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_pool_name [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_capacity_alert [GOOD] >> test.py::TestViewer::test_viewer_sysinfo [GOOD] >> test.py::TestViewer::test_viewer_vdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_pdiskinfo >> test.py::TestViewer::test_viewer_pdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_bsgroupinfo [GOOD] >> test.py::TestViewer::test_viewer_tabletinfo >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> test.py::TestViewer::test_viewer_tabletinfo [GOOD] >> test.py::TestViewer::test_viewer_describe >> TopicSessionTests::RestartSessionIfQueryStopped >> test.py::TestViewer::test_viewer_describe [GOOD] >> test.py::TestViewer::test_viewer_cluster [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo >> 
TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> test.py::TestViewer::test_viewer_tenantinfo [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo_db >> test.py::TestViewer::test_viewer_tenantinfo_db [GOOD] >> test.py::TestViewer::test_viewer_healthcheck >> test.py::TestViewer::test_viewer_healthcheck [GOOD] >> test.py::TestViewer::test_viewer_acl |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test.py::TestViewer::test_viewer_acl [GOOD] >> test.py::TestViewer::test_viewer_acl_write >> test.py::TestViewer::test_viewer_acl_write [GOOD] >> test.py::TestViewer::test_viewer_autocomplete >> test.py::TestViewer::test_viewer_autocomplete [GOOD] >> test.py::TestViewer::test_viewer_check_access |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test.py::TestViewer::test_viewer_check_access [GOOD] >> test.py::TestViewer::test_viewer_query >> test.py::TestViewer::test_viewer_query [GOOD] >> test.py::TestViewer::test_viewer_query_from_table >> Vacuum::MultipleVacuumsWithOldGenerations [GOOD] >> Vacuum::VacuumWithRestart >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test.py::TestViewer::test_viewer_query_from_table [GOOD] >> test.py::TestViewer::test_viewer_query_from_table_different_schemas >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig [GOOD] >> ConfigGRPCService::FetchConfig >> test.py::TestViewer::test_viewer_query_from_table_different_schemas [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> test.py::TestViewer::test_viewer_query_issue_13757 [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13945 >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> test.py::TestViewer::test_viewer_query_issue_13945 [GOOD] >> test.py::TestViewer::test_pqrb_tablet |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> test.py::TestViewer::test_pqrb_tablet [GOOD] >> test.py::TestViewer::test_viewer_nodes_issue_14992 [GOOD] >> test.py::TestViewer::test_operations_list [GOOD] >> test.py::TestViewer::test_operations_list_page >> test.py::TestViewer::test_operations_list_page [GOOD] >> test.py::TestViewer::test_operations_list_page_bad [GOOD] >> test.py::TestViewer::test_scheme_directory >> 
test_drain.py::TestHive::test_drain_tablets >> test.py::TestViewer::test_scheme_directory [GOOD] >> test.py::TestViewer::test_topic_data >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TTxDataShardBuildIndexScan::BadRequest [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> ConfigGRPCService::FetchConfig [GOOD] >> ConfigGRPCService::CheckV1IsBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:20.592163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:20.865815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:20.964010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:20.964735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:20.964792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:21.682293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:21.682410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:21.773072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809356969248 != 1767809356969252 2026-01-07T18:09:21.787940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:21.842534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:21.977766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:22.432722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:22.449536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:22.583038Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-07T18:09:22.583155Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-07T18:09:22.587974Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-07T18:09:22.754937Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-07T18:09:22.755065Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-07T18:09:22.757898Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-07T18:09:22.758066Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-07T18:09:22.758492Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-07T18:09:22.758671Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-07T18:09:22.758865Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-07T18:09:22.759130Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-07T18:09:22.765881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:22.770278Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-07T18:09:22.770362Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-07T18:09:22.845002Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:09:22.846839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:09:22.861436Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T18:09:22.861877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:22.922455Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:09:22.923232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:22.923355Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:22.925913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:22.926015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:22.926074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:22.928108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:22.928272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:22.928367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T18:09:22.939817Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:23.018603Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:09:23.020769Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:23.021003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T18:09:23.021048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:23.021080Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:09:23.021148Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:23.021394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:23.024224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:23.026867Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:09:23.027029Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:23.027130Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:23.027181Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:23.027233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T18:09:23.027273Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T18:09:23.027306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T18:09:23.027339Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:09:23.027396Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:23.027518Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:23.027559Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:23.027647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T18:09:23.028038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T18:09:23.028099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T18:09:23.028201Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:09:23.028510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T18:09:23.028571Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T18:09:23.028648Z ... }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2026-01-07T18:10:28.368636Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [8:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T18:10:28.368729Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [8:68:2115] TxId# 281474976715665 ProcessProposeKqpTransaction 2026-01-07T18:10:28.370345Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [8:1304:3063], Recipient [8:1279:3045]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:10:28.370424Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:10:28.370495Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1302:3062], serverId# [8:1304:3063], sessionId# [0:0:0] 2026-01-07T18:10:28.372497Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [8:1307:3064], Recipient [8:1279:3045]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-07T18:10:28.372558Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-07T18:10:28.372691Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-07T18:10:28.372761Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T18:10:28.373026Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-07T18:10:28.373135Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:837: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2026-01-07T18:10:28.373799Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:854: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2026-01-07T18:10:28.374291Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:925: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2026-01-07T18:10:28.374395Z node 8 
:TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-07T18:10:28.374500Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-07T18:10:28.374604Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T18:10:28.374656Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-07T18:10:28.374714Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-07T18:10:28.374757Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-07T18:10:28.374798Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-07T18:10:28.374841Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T18:10:28.374868Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-07T18:10:28.374890Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-07T18:10:28.374914Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-07T18:10:28.375055Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T18:10:28.375327Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Restart 2026-01-07T18:10:28.375363Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2026-01-07T18:10:28.375450Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T18:10:28.375523Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} pin 0 (0 b) load 1 (65 b) 2026-01-07T18:10:28.375590Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2026-01-07T18:10:28.375682Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} request page collection [72075186224037888:1:24:1:12288:190:0] pages [ 0 ] 2026-01-07T18:10:28.375772Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, loading 1 pages, 65 bytes, newly pinned 0 pages, 0 bytes 2026-01-07T18:10:28.375983Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:24:1:12288:190:0] ok OK}, type 1 
2026-01-07T18:10:28.376084Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} activated 2026-01-07T18:10:28.376162Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-07T18:10:28.376201Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-07T18:10:28.376291Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T18:10:28.376528Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[8:1307:3064], 0} after executionsCount# 2 2026-01-07T18:10:28.376605Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[8:1307:3064], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2026-01-07T18:10:28.376705Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T18:10:28.376733Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-01-07T18:10:28.376758Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-07T18:10:28.376785Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-07T18:10:28.376840Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-07T18:10:28.376865Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-07T18:10:28.376894Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-07T18:10:28.377048Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-07T18:10:28.377154Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T18:10:28.377229Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2026-01-07T18:10:28.377303Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-07T18:10:28.377474Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553217, Sender [8:1279:3045], Recipient [8:1279:3045]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-07T18:10:28.377514Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3306: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2026-01-07T18:10:28.377609Z node 8 :TABLET_EXECUTOR DEBUG: 
Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2026-01-07T18:10:28.377673Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-07T18:10:28.377748Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037890 ReadContinue for iterator# {[8:1307:3064], 0}, firstUnprocessedQuery# 0 2026-01-07T18:10:28.377825Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037890 ReadContinue: iterator# {[8:1307:3064], 0}, FirstUnprocessedQuery# 0 2026-01-07T18:10:28.377946Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3338: 72075186224037890 readContinue iterator# {[8:1307:3064], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-07T18:10:28.378014Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3362: 72075186224037890 read iterator# {[8:1307:3064], 0} finished in ReadContinue 2026-01-07T18:10:28.378119Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-07T18:10:28.378189Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2026-01-07T18:10:28.379382Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [8:1307:3064], Recipient [8:1279:3045]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T18:10:28.379431Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-07T18:10:28.379496Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest >> TListAllTopicsTests::ListLimitAndPaging [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_followers/unittest >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> DescribeSchemaSecretsService::BigBatchRequest [GOOD] >> DescribeSchemaSecretsService::EmptyBatch >> Vacuum::VacuumWithRestart [GOOD] >> Vacuum::OutReadSetsCleanedAfterCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/list_topics/ut/unittest >> TListAllTopicsTests::ListLimitAndPaging [GOOD] Test command err: 2026-01-07T18:09:51.142170Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683520655007521:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:51.142223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:09:51.320769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:51.366128Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 
2026-01-07T18:09:51.881876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:51.950580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:51.950706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:51.960292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:52.089130Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683520655007485:2081] 1767809391133211 != 1767809391133214 2026-01-07T18:09:52.109159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:52.120497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:09:52.157514Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:52.942667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/0021a6/r3tmp/yandexVHPhnC.tmp 2026-01-07T18:09:52.942764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/0021a6/r3tmp/yandexVHPhnC.tmp 2026-01-07T18:09:52.949518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/0021a6/r3tmp/yandexVHPhnC.tmp 2026-01-07T18:09:52.949744Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:09:53.798922Z INFO: TTestServer started on Port 9945 GrpcPort 2625 PQClient connected to localhost:2625 2026-01-07T18:09:54.189614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:54.200610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:09:54.225843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-07T18:09:54.369916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:54.691474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 
72057594046644480 2026-01-07T18:09:55.953832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683537834877717:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.953944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683537834877730:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.954027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.955831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683537834877734:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.955891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.970120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:55.978793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683537834877773:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.978879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.979096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683537834877781:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.979143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:55.984179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592683537834877733:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-07T18:09:56.073967Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683542129845089:2648] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:09:57.066494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683520655007521:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:57.066774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:09:57.166629Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592683542129845105:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T18:09:57.189578Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzJlZWUzMGItOTZmMGI0NzQtOGE5N2IxMWQtYzU1ZWI2OTE=, ActorId: [1:7592683537834877715:2330], ActorState: ExecuteState, LegacyTraceId: 01kectdb6h6cy8sp27evce7djh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T18:09:57.199818Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T18:09:57.428637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:57.496550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:57.715944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, ... 
Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-07T18:10:30.200767Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:10:30.200777Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.200795Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:10:30.200804Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.200828Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.200838Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:10:30.200849Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.200858Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:10:30.200869Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.200902Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72075186224037896][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-07T18:10:30.201270Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T18:10:30.201324Z node 3 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037896][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767809430240, TxId 281474976715676 2026-01-07T18:10:30.201341Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.201350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:10:30.201362Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.201382Z node 3 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-07T18:10:30.201503Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037896][Partition][0][StateIdle] Schedule reply tx done 281474976715676 2026-01-07T18:10:30.201518Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T18:10:30.201527Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037896][Partition][0][StateIdle] Batch completed (1) 2026-01-07T18:10:30.201539Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.201702Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037896][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-07T18:10:30.209303Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037896][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-07T18:10:30.209579Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037896][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-07T18:10:30.209658Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037896][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-07T18:10:30.209682Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.209693Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.209726Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.209768Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.209779Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.209805Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-01-07T18:10:30.210177Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2026-01-07T18:10:30.210236Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T18:10:30.210357Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-07T18:10:30.215534Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-07T18:10:30.236009Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.236037Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-07T18:10:30.236048Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.236068Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.236078Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.299504Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.299534Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.299546Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.299562Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.299572Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.305313Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.305358Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.305369Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.305385Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.305393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.341878Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.341914Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.341928Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.341957Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.341978Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.400941Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.400979Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.400991Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.401008Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.401018Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 
2026-01-07T18:10:30.407277Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.407305Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.407319Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.407340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.407351Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-07T18:10:30.441699Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:10:30.441734Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.441747Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:10:30.441768Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-07T18:10:30.441779Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |99.0%| [TM] {RESULT} ydb/core/persqueue/public/list_topics/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/list_topics/ut/unittest >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> TMLPConsumerTests::HtmlApp_BadConsumer [GOOD] >> TMLPConsumerTests::HtmlApp_BadPartition >> test.py::TestViewer::test_topic_data [GOOD] >> test.py::TestViewer::test_topic_data_cdc |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test.py::TestViewer::test_topic_data_cdc [GOOD] >> test.py::TestViewer::test_async_replication_describe >> test.py::TestViewer::test_async_replication_describe [GOOD] >> test.py::TestViewer::test_transfer_describe >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> test.py::TestViewer::test_transfer_describe [GOOD] >> test.py::TestViewer::test_viewer_query_long >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> ConfigGRPCService::CheckV1IsBlocked [GOOD] >> TopicSessionTests::WrongJson >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::CheckV1IsBlocked [GOOD] Test command err: 2026-01-07T18:10:13.423125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683614756419588:2078];send_to=[0:7307199536658146131:7762515]; 
2026-01-07T18:10:13.423199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:10:13.532639Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007276s 2026-01-07T18:10:13.652199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:14.122299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:10:14.418153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:10:14.453097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:14.455469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:14.569629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:14.569736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:14.590766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:14.704658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:14.704677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683614756419541:2082] 1767809413396978 != 1767809413396981 2026-01-07T18:10:14.747132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:10:14.836100Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-07T18:10:14.836494Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-07T18:10:14.836695Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-07T18:10:14.836782Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-07T18:10:14.843557Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-07T18:10:14.843606Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-07T18:10:14.843657Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-07T18:10:14.843665Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-07T18:10:14.848010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, 
database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/.sys 2026-01-07T18:10:14.850070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1001 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1002 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038083 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1003 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038084 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1004 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2026-01-07T18:10:14.850484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2026-01-07T18:10:14.850902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7592683619051387546:2400] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, mkdir '/Root/.sys', status: StatusAccepted 2026-01-07T18:10:14.850922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7592683619051387546:2400] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715657 2026-01-07T18:10:14.851030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 3, at schemeshard: 72057594046644480 2026-01-07T18:10:14.851197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_permissions 2026-01-07T18:10:14.851215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715658:0, there is await operations num 1 2026-01-07T18:10:14.851323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715659, response: Status: StatusAccepted TxId: 281474976715659 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2026-01-07T18:10:14.851419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_effective_permissions 2026-01-07T18:10:14.851431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715659:0, there is await operations num 1 2026-01-07T18:10:14.851479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAccepted TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 5, at schemeshard: 72057594046644480 2026-01-07T18:10:14.851595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_users 2026-01-07T18:10:14.851605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715660:0, there is await operations num 1 2026-01-07T18:10:14.851665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715661, response: Status: StatusAccepted TxId: 281474976715661 SchemeshardId: 72057594046644480 PathId: 6, at schemeshard: 72057594046644480 2026-01-07T18:10:14.851756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/pg_tables 2026-01-07T18:10:14.851773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715661:0, there is await operations num 1 2026-01-07T18:10:14.851832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715662, response: Status: StatusAccepted TxId: 281474976715662 SchemeshardId: 72057594046644480 PathId: 7, at schemeshard: 72057594046644480 2026-01-07T18:10:14.851903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/ds_pdisks 2026-01-07T18:10:14.851913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715662:0, there is await operations num 1 2026-01-07T18:10:14.851963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715663, response: Status: StatusAccepted TxId: 281474976715663 SchemeshardId: 72057594046644480 PathId: 8, at schemeshard: 72057594046644480 2026-01-07T18:10:14.852050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715663, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_group_members 2026-01-07T18:10:14.852081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715663:0, there is await operations num 1 2026-01-07T18:10:14.852126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715664, response: Status: StatusAccepted TxId: 281474976715664 SchemeshardId: 72057594046644480 PathId: 9, at schemeshard: 72057594046644480 2026-01-07T18:10:14.852232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715664, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/top_partitions_one_minute 2026-01-07T18:10:14.852243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715664:0, there is 
await operations num 1 2026-01-07T18:10:14.852390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusAccepted TxId: 281474976715665 SchemeshardId: 72057594046644480 PathId: 10, at schemeshard: 72057594046644480 2026-01-07T18:10:14.852463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/top_queries_by_request_units_one_hour 2026-01-07T18:10:14.852472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715665:0, there is await operations num 1 2026-01-07T18:10:14.852516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusAccepted TxId: 281474976715666 SchemeshardId: 72057594046644480 PathId: 11, at schemeshard: 72057594046644480 2026-01-07T18:10:14.852723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemesha ... ys 2026-01-07T18:10:32.216911Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-07T18:10:32.216970Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-07T18:10:32.217482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710657 2026-01-07T18:10:32.217504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2026-01-07T18:10:32.217519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2026-01-07T18:10:32.217544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T18:10:32.217626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-07T18:10:32.217644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7592683698890049692:2305] 2026-01-07T18:10:32.222177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2026-01-07T18:10:32.276580Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01kecteepj8ar38y9ae4axgank, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33780, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T18:10:32.339796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-07T18:10:32.340662Z node 1 :BS_CONTROLLER DEBUG: 
{BSC24@console_interaction.cpp:224} Console replace config request Request# {ClusterYaml: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" DedicatedConfigMode: false AllowUnknownFields: false BypassMetadataChecks: false PeerName: "ipv6:[::1]:33780" UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-07T18:10:32.342011Z node 1 :BS_CONTROLLER DEBUG: {BSC36@console_interaction.cpp:407} Sending TEvControllerValidateConfigRequest to console ConsolePipe# [1:7592683694595081863:2467] 2026-01-07T18:10:32.351993Z node 1 :BS_CONTROLLER DEBUG: {BSC27@console_interaction.cpp:478} Console validate config response Response# {Status: ConfigIsValid YAML: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" } 2026-01-07T18:10:32.363784Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 5670699008 State: Normal SlotSizeInUnits: 0 PDiskUsage: 0.098231827111984277 } } 2026-01-07T18:10:32.364105Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } Rollback: true StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-07T18:10:32.364470Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-07T18:10:32.364486Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-07T18:10:32.364502Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-07T18:10:32.364586Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-07T18:10:32.364599Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 2026-01-07T18:10:32.364608Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-07T18:10:32.364616Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-07T18:10:32.374314Z node 1 :BS_CONTROLLER DEBUG: {BSC14@bsc.cpp:624} ApplyStorageConfig Request# {Request { Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } } 2026-01-07T18:10:32.375910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-07T18:10:32.376259Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-07T18:10:32.376277Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-07T18:10:32.376294Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-07T18:10:32.376355Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-07T18:10:32.376363Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 
2026-01-07T18:10:32.376374Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-07T18:10:32.376384Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-07T18:10:32.379792Z node 1 :BS_CONTROLLER ERROR: {BSC15@bsc.cpp:687} TEvControllerConfigResponse Response# {Status { Success: true } Status { Success: true } Status { Success: true } ErrorDescription: "failed to remove PDisk# 1:1 as it has active VSlots" ConfigTxSeqNo: 4 } 2026-01-07T18:10:32.396164Z node 1 :BS_CONTROLLER DEBUG: {BSC20@console_interaction.cpp:199} Console commit config response Response# {Status: Committed } 2026-01-07T18:10:32.417127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: FeatureFlags { SwitchToConfigV2: true } QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2026-01-07T18:10:32.417171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:32.420762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage 2026-01-07T18:10:32.420834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop response: ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Config.ReplaceConfigResult] { } } 2026-01-07T18:10:32.437763Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:32.446477Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# FetchConfigRequest, traceId# 01kecteevxet56zwh100h74sa8, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33780, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-07T18:10:32.448367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-07T18:10:32.456340Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01kecteew4e8mrh52pdjw9hv8x, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33780, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef |99.0%| [TM] {BAZEL_UPLOAD} ydb/services/config/ut/unittest |99.0%| [TM] {RESULT} ydb/services/config/ut/unittest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:35.109034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:35.341975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:35.391690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:35.392331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:35.392398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:36.111006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:36.111141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:36.226091Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809371086409 != 1767809371086413 2026-01-07T18:09:36.261402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:36.324577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:36.502967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:36.945556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:36.958685Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema ... waiting for SysViewsRoster update finished 2026-01-07T18:09:42.868833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:42.875212Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:42.879260Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:288:2335], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:42.879559Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:42.879723Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:43.156112Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:43.156240Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:43.216241Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767809379190498 != 1767809379190502 2026-01-07T18:09:43.220292Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:43.270244Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:43.367283Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:43.709997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:43.726312Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) ... waiting for SysViewsRoster update finished 2026-01-07T18:09:48.552916Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:48.560045Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:48.562843Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:48.563138Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:48.563289Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:48.859899Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:48.860059Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:48.889492Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1767809384740916 != 1767809384740919 2026-01-07T18:09:48.895116Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:48.941906Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:49.033285Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:49.351698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:49.366262Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T18:09:54.682258Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:54.688102Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:54.698889Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:54.699396Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:54.699466Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:55.123221Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:55.123336Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:55.162528Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767809390419838 != 1767809390419842 2026-01-07T18:09:55.167576Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:55.292429Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:55.375289Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:55.735311Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:55.748796Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T18:10:01.450285Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:01.456854Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:01.462050Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:224:2271], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2 ... __operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:08.477827Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T18:10:13.482806Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:13.504842Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:13.511788Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:318:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:13.512056Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:13.512193Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:13.933021Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:13.933169Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:13.979047Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:34:2081] 1767809409201444 != 1767809409201448 2026-01-07T18:10:13.987983Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:14.041454Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:14.118765Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:14.447274Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:14.463272Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T18:10:20.415890Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:20.424288Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:20.429874Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:112:2159], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:20.430152Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:20.430236Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:20.943311Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:20.943452Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:21.061633Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:34:2081] 1767809415408329 != 1767809415408333 2026-01-07T18:10:21.075132Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:21.158582Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:21.247618Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:21.803498Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:21.820530Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-07T18:10:27.132602Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:27.139345Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:27.142378Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:27.142732Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:27.142894Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:27.491454Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:27.491594Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:27.498618Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1767809422854657 != 1767809422854660 2026-01-07T18:10:27.505160Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:27.559853Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:27.653513Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:28.012327Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:28.028647Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 2026-01-07T18:10:28.382249Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-07T18:10:34.821580Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:34.833742Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:34.836794Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:319:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:34.837120Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:34.837363Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:35.155830Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:35.155997Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:35.192333Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1767809429638413 != 1767809429638417 2026-01-07T18:10:35.198150Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:35.250570Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:35.345115Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:35.684302Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:35.711528Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2026-01-07T18:10:36.121205Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/tablet/ut/unittest |99.0%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2026-01-07T18:09:42.217001Z node 1 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:09:42.221399Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:09:42.224271Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 121 bytes of data to buffer 2026-01-07T18:09:42.227547Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-07T18:09:42.227572Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:09:42.234135Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:09:42 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-07T18:09:42.245557Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:09:52.617496Z node 2 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:09:52.642288Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:09:52.692589Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-07T18:09:52.734841Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.756010Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.772058Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.791742Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.823320Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.846639Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:09:52.854728Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2026-01-07T18:09:52.855059Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-07T18:09:52.855325Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:52.855557Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:52.855577Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:09:53.001716Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:52 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.002219Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:53.002241Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:09:53.121417Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.121769Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:53.121787Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:09:53.231464Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.231884Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:53.231904Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:09:53.345430Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.345836Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:09:53.345852Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:09:53.514230Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.514460Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2026-01-07T18:09:53.514475Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-07T18:09:53.759684Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.759780Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:09:53.789353Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 500} 2026-01-07T18:09:53.789469Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:09:53.845240Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:09:53 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:09:53.845385Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:04.103915Z node 3 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:10:04.110227Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:10:04.130768Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 79903 bytes of data to buffer 2026-01-07T18:10:04.147397Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.158045Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.168786Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.179627Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.190456Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.201022Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-07T18:10:04.206460Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 40513 bytes of data to buffer 2026-01-07T18:10:04.206734Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 79903 bytes to solomon 2026-01-07T18:10:04.206957Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.207131Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.207143Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:04.281560Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.281931Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.281950Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:04.409423Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.409749Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.409769Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:04.517513Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.517971Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.517990Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:04.597631Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.598014Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-07T18:10:04.598029Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:04.677487Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.677688Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 40513 bytes to solomon 2026-01-07T18:10:04.677703Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-07T18:10:04.756689Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.756804Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:04.837702Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-07T18:10:04.837803Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:04.877470Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 25 Date: Wed, 07 Jan 2026 18:10:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 500} 2026-01-07T18:10:04.877577Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:16.056823Z node 4 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:10:16.073085Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:10:16.073481Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2026-01-07T18:10:16.073647Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2026-01-07T18:10:16.073661Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:16.082182Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Wed, 07 Jan 2026 18:10:16 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 10} 2026-01-07T18:10:16.082283Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:26.413170Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:10:26.416241Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:10:26.434099Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-07T18:10:26.451317Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-07T18:10:26.456878Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2026-01-07T18:10:26.457606Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-07T18:10:26.457827Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-07T18:10:26.457904Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2026-01-07T18:10:26.457918Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. 
Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-07T18:10:26.484430Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Wed, 07 Jan 2026 18:10:26 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 400} 2026-01-07T18:10:26.484545Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2026-01-07T18:10:26.626156Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:10:26 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:10:26.626288Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2026-01-07T18:10:26.665404Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Wed, 07 Jan 2026 18:10:26 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-07T18:10:26.665526Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:27.210500Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-07T18:10:27.210738Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:10:27.210859Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-07T18:10:27.210953Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-07T18:10:27.210966Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2026-01-07T18:10:27.223787Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:27 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-07T18:10:27.223901Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-07T18:10:27.224122Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-07T18:10:27.224234Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-07T18:10:27.224314Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-07T18:10:27.224327Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer 2026-01-07T18:10:27.228102Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Wed, 07 Jan 2026 18:10:27 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-07T18:10:27.228205Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |99.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/providers/solomon/actors/ut/unittest |99.0%| [TM] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> Vacuum::OutReadSetsCleanedAfterCopyTable [GOOD] >> Vacuum::BorrowerDataCleanedAfterCopyTable >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> test.py::TestViewer::test_viewer_query_long [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> test.py::TestViewer::test_viewer_query_long_multipart [GOOD] >> test.py::TestViewer::test_viewer_query_event_stream >> test.py::TestViewer::test_viewer_query_event_stream [GOOD] >> test.py::TestViewer::test_security >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test.py::TestViewer::test_security [GOOD] >> test.py::TestViewer::test_storage_stats >> ReadUpdateWrite::Load [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> test.py::TestViewer::test_storage_stats [GOOD] >> test.py::TestViewer::test_viewer_peers |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test.py::TestViewer::test_viewer_peers [GOOD] >> DescribeSchemaSecretsService::EmptyBatch [GOOD] >> DescribeSchemaSecretsService::MixedGrantsInBatch >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> DataShardStats::BlobsStatsCorrect [GOOD] >> 
DataShardStats::SharedCacheGarbage >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. only write Was written: 2 MiB, Speed: 1844674407370955159 MiB/s Write: 10% Write: 10% Write: 10% 1.106144s 30% 1.106144s 50% 1.106144s 90% 1.106144s 99% 1.106144s Write: 10% 0.751860s 30% 0.751860s 50% 0.751860s 90% 0.751860s 99% 0.751860s 0.743045s 30% 0.743045s 50% 0.743045s 90% 0.743045s 99% 0.743045s Write: 10% 0.809690s 30% 0.809690s 50% 0.809690s 90% 0.809690s 99% 0.809690s Write: 10% 0.886606s 30% 0.886606s 50% 1.101957s 30% 1.101957s 50% 1.101957s 90% 1.101957s 99% 1.101957s0.886606s 90% 0.886606s 99% 0.886606s Write: 10% 1.010326s 30% 1.010326s 50% Write: 10% 0.756922s 30% 0.756922s 50% 0.756922s 90% 0.756922s 99% 0.756922s 1.010326s 90% 1.010326s 99% 1.010326sWrite: 10% Write: 10% 0.950089s 30% 0.950089s 50% 0.950089s 90% 0.950089s 99% 0.950089s Write: 10% 1.013271s 30% 1.013271s 50% 1.013271s 90% 1.013271s 99% 1.013271s0.761700s 30% 0.761700s 50% 0.761700s 90% 0.761700s 99% 0.761700s Write: 10% 0.462123s 30% 0.462123s 50% 0.462123s 90% 0.462123s 99% 0.462123s Write: 10% 1.149894s 30% 1.149894s 50% Write: 10% 0.730652s 30% 0.730652s 50% 0.730652s 90% 0.730652s 99% 0.730652s 1.149894s 90% 1.149894s 99% 1.149894sWrite: 10% 1.149977s 30% 1.149977s 50% Write: 10% 0.591769s 30% 0.591769s 50% 1.149977s 90% 1.149977s 99% 1.149977sWrite0.591769s 90% 0.591769s 99% 0.591769sWrite: 10% 1.157909s 30% 1.157909s 50% 1.157909s 90% 1.157909s 99% 1.157909s : 10% 1.085782s 30% 1.085782s 50% 1.085782s 90% 1.085782s 99% 1.085782s Write: 10% 1.009449s 30% 1.009449s 50% 1.009449s 90% 1.009449s 99% 1.009449s Write: 10% 0.610601s 30% 0.610601s 50% 0.610601s 90% 0.610601s 99% 0.610601s Write: 10% 0.663949s 30% 0.663949s 50% 0.663949s 90% 0.663949s 99% 0.663949s Write: 10% 0.756166s 30% 0.756166s 50% 0.756166s 90% 0.756166s 99% 0.756166s Write: 10% 0.705519s 30% 0.705519s 50% 0.705519s 90% 0.705519s 99% 0.705519s Write: 10% 1.343418s 30% 1.343418s 50% 1.343418s 90% 1.343418s 99% 1.343418s Write: 10% 1.316038s 30% 1.316038s 50% 1.316038s 90% 1.316038s 99% 1.316038s Write: 10% 0.940561s 30% 0.940561s 50% 0.940561s 90% 0.940561s 99% 0.940561s Write: 10% 0.948825s 30% 0.948825s 50% 0.948825s 90% 0.948825s 99% 0.948825s Write: 10% 0.782421s 30% 0.782421s 50% 0.782421s 90% 0.782421s 99% 0.782421s Write: 10% 0.999910s 30% 0.999910s 50% 0.999910s 90% 0.999910s 99% 0.999910s Write: 10% 0.718335s 30% 0.718335s 50% 0.718335s 90% 0.718335s 99% 0.718335s Write: 10% 0.576546s 30% 0.576546s 50% 0.576546s 90% 0.576546s 99% 0.576546s Write: 10% 1.327736s 30% 1.327736s 50% 1.327736s 90% 1.327736s 99% 1.327736s Write: 10% 0.618341s 30% 0.618341s 50% 0.618341s 90% 0.618341s 99% 0.618341s Write: 10% 0.808151s 30% 0.808151s 50% 0.808151s 90% 0.808151s 99% 0.808151s Write: 10% 0.774545s 30% 0.774545s 50% 0.774545s 90% 0.774545s 99% 0.774545s Write: 10% 0.770763s 30% 0.770763s 50% 0.770763s 90% 0.770763s 99% 0.770763s Write: 10% 0.680018s 30% 0.680018s 50% 0.680018s 90% 0.680018s 99% 0.680018s Write: 10% 0.693769s 30% 0.693769s 50% 0.693769s 90% 0.693769s 99% 0.693769s Write: 10% 0.865587s 30% 0.865587s 50% 0.865587s 90% 0.865587s 99% 0.865587s Write: 10% 0.607459s 30% 0.607459s 50% 0.607459s 90% 0.607459s 99% 0.607459s Write: 10% 0.699449s 30% 0.699449s 50% 0.699449s 90% 0.699449s 99% 0.699449s Write: 10% 0.742211s 30% 0.742211s 50% 0.742211s 90% 0.742211s 
99% 0.742211s Write: 10% 0.706164s 30% 0.706164s 50% 0.706164s 90% 0.706164s 99% 0.706164s Write: 10% 0.736222s 30% 0.736222s 50% 0.736222s 90% 0.736222s 99% 0.736222s Write: 10% 0.766399s 30% 0.766399s 50% 0.766399s 90% 0.766399s 99% 0.766399s Write: 10% 0.654419s 30% 0.654419s 50% 0.654419s 90% 0.654419s 99% 0.654419s Write: 10% 0.686900s 30% 0.686900s 50% 0.686900s 90% 0.686900s 99% 0.686900s Write: 10% 0.607505s 30% 0.607505s 50% 0.607505s 90% 0.607505s 99% 0.607505s Write: 10% 1.314258s 30% 1.314258s 50% 1.314258s 90% 1.314258s 99% 1.314258s Write: 10% 0.689435s 30% 0.689435s 50% 0.689435s 90% 0.689435s 99% 0.689435s Write: 10% 0.876337s 30% 0.876337s 50% 0.876337s 90% 0.876337s 99% 0.876337s Write: 10% 0.681009s 30% 0.681009s 50% 0.681009s 90% 0.681009s 99% 0.681009s Write: 10% 1.278821s 30% 1.278821s 50% 1.278821s 90% 1.278821s 99% 1.278821s Write: 10% 0.785682s 30% 0.785682s 50% 0.785682s 90% 0.785682s 99% 0.785682s Write: 10% 0.708535s 30% 0.708535s 50% 0.708535s 90% 0.708535s 99% 0.708535s Write: 10% 1.179849s 30% 1.179849s 50% 1.179849s 90% 1.179849s 99% 1.179849s Write: 10% Write: 10% 1.397341s 30% 1.397341s 50% 0.654370s 30% 1.397341s 90% 1.397341s 99% 1.397341s 0.654370s 50% 0.654370s 90% 0.654370s 99% 0.654370s Write: 10% 1.403935s 30% 1.403935s 50% 1.403935s 90% 1.403935s 99% 1.403935sWrite: 10% 0.707898s 30% 0.707898s 50% 0.707898s 90% 0.707898s 99% 0.707898s Write: 10% 1.021813s 30% 1.021813s 50% 1.021813s 90% 1.021813s 99% 1.021813s Write: 10% 0.868436s 30% 0.868436s 50% 0.868436s 90% 0.868436s 99% 0.868436s Write: 10% 1.191694s 30% 1.191694s 50% 1.191694s 90% 1.191694s 99% 1.191694s Write: 10% 0.974918s 30% 0.974918s 50% 0.974918s 90% 0.974918s 99% 0.974918s Step 2. read write Write: 10% 0.431743s 30% 0.431743s 50% 0.431743s 90% 0.431743s 99% 0.431743s Write: 10% 0.578297s 30% 0.578297s 50% 0.578297s 90% 0.578297s 99% 0.578297s Write: 10% 0.265314s 30% 0.265314s 50% 0.265314s 90% 0.265314s 99% 0.265314s Write: 10% 0.241735s 30% 0.241735s 50% 0.241735s 90% 0.241735s 99% 0.241735s Write: 10% 0.396881s 30% 0.396881s 50% 0.396881s 90% 0.396881s 99% 0.396881s Write: 10% 0.548017s 30% 0.548017s 50% 0.548017s 90% 0.548017s 99% 0.548017s Write: 10% 0.709926s 30% 0.709926s 50% 0.709926s 90% 0.709926s 99% 0.709926s Write: 10% 0.357472s 30% 0.357472s 50% 0.357472s 90% 0.357472s 99% 0.357472sWrite: 10% 0.460214s 30% 0.460214s 50% 0.460214s 90% 0.460214s 99% 0.460214s Write: 10% 0.319624s 30% 0.319624s 50% 0.319624s 90% 0.319624s 99% 0.319624s Write: 10% 0.768728s 30% 0.768728s 50% 0.768728s 90% 0.768728s 99% 0.768728s Write: 10% 0.244126s 30% 0.244126s 50% 0.244126s 90% 0.244126s 99% 0.244126s Write: 10% 0.345023s 30% 0.345023s 50% 0.345023s 90% 0.345023s 99% 0.345023s Write: 10% 0.398946s 30% 0.398946s 50% 0.398946s 90% 0.398946s 99% 0.398946s Write: 10% 0.214555s 30% 0.214555s 50% 0.214555s 90% 0.214555s 99% 0.214555s Write: 10% 0.270646s 30% 0.270646s 50% 0.270646s 90% 0.270646s 99% 0.270646s Write: 10% 0.421865s 30% 0.421865s 50% 0.421865s 90% 0.421865s 99% 0.421865s Write: 10% 0.310906s 30% 0.310906s 50% 0.310906s 90% 0.310906s 99% 0.310906s Write: 10% 0.214527s 30% 0.214527s 50% 0.214527s 90% 0.214527s 99% 0.214527s Write: 10% 0.252369s 30% 0.252369s 50% 0.252369s 90% 0.252369s 99% 0.252369s Write: 10% 0.326653s 30% 0.326653s 50% 0.326653s 90% 0.326653s 99% 0.326653s Write: 10% 0.255443s 30% 0.255443s 50% 0.255443s 90% 0.255443s 99% 0.255443s Write: 10% 0.379367s 30% 0.379367s 50% 0.379367s 90% 0.379367s 99% 0.379367s Write: 10% 0.896196s 30% 0.896196s 50% 0.896196s 90% 
0.896196s 99% 0.896196s Write: 10% 0.478084s 30% 0.478084s 50% 0.478084s 90% 0.478084s 99% 0.478084s Write: 10% 0.329532s 30% 0.329532s 50% 0.329532s 90% 0.329532s 99% 0.329532s Write: 10% 0.657030s 30% 0.657030s 50% 0.657030s 90% 0.657030s 99% 0.657030s Write: 10% 0.464151s 30% 0.464151s 50% 0.464151s 90% 0.464151s 99% 0.464151s Write: 10% 0.632102s 30% 0.632102s 50% 0.632102s 90% 0.632102s 99% 0.632102s Write: 10% 0.608826s 30% 0.608826s 50% 0.608826s 90% 0.608826s 99% 0.608826s Write: 10% 0.667126s 30% 0.667126s 50% 0.667126s 90% 0.667126s 99% 0.667126s Write: 10% 0.450311s 30% 0.450311s 50% 0.450311s 90% 0.450311s 99% 0.450311s Write: 10% 0.569228s 30% 0.569228s 50% 0.569228s 90% 0.569228s 99% 0.569228s Write: 10% 0.455774s 30% 0.455774s 50% 0.455774s 90% 0.455774s 99% 0.455774s Write: 10% 0.448497s 30% 0.448497s 50% 0.448497s 90% 0.448497s 99% 0.448497s Write: 10% 0.440773s 30% 0.440773s 50% 0.440773s 90% 0.440773s 99% 0.440773s Write: 10% 0.495606s 30% 0.495606s 50% 0.495606s 90% 0.495606s 99% 0.495606s Write: 10% 0.492415s 30% 0.492415s 50% 0.492415s 90% 0.492415s 99% 0.492415s Write: 10% 0.430371s 30% 0.430371s 50% 0.430371s 90% 0.430371s 99% 0.430371s Write: 10% 0.475638s 30% 0.475638s 50% 0.475638s 90% 0.475638s 99% 0.475638s Write: 10% 0.462061s 30% 0.462061s 50% 0.462061s 90% 0.462061s 99% 0.462061s Write: 10% 0.412733s 30% 0.412733s 50% 0.412733s 90% 0.412733s 99% 0.412733s Write: 10% 0.405376s 30% 0.405376s 50% 0.405376s 90% 0.405376s 99% 0.405376s Write: 10% 0.437757s 30% 0.437757s 50% 0.437757s 90% 0.437757s 99% 0.437757s Write: 10% 0.437881s 30% 0.437881s 50% 0.437881s 90% 0.437881s 99% 0.437881s Write: 10% 0.471720s 30% 0.471720s 50% 0.471720s 90% 0.471720s 99% 0.471720s Write: 10% 0.479082s 30% 0.479082s 50% 0.479082s 90% 0.479082s 99% 0.479082s Write: 10% 0.468102s 30% 0.468102s 50% 0.468102s 90% 0.468102s 99% 0.468102s Write: 10% 0.462480s 30% 0.462480s 50% 0.462480s 90% 0.462480s 99% 0.462480s Write: 10% 0.478437s 30% 0.478437s 50% 0.478437s 90% 0.478437s 99% 0.478437s Write: 10% 0.486795s 30% 0.486795s 50% 0.486795s 90% 0.486795s 99% 0.486795s Write: 10% 0.463150s 30% 0.463150s 50% 0.463150s 90% 0.463150s 99% 0.463150s Write: 10% 0.482508s 30% 0.482508s 50% 0.482508s 90% 0.482508s 99% 0.482508s Write: 10% 0.460834s 30% 0.460834s 50% 0.460834s 90% 0.460834s 99% 0.460834s Write: 10% 0.488921s 30% 0.488921s 50% 0.488921s 90% 0.488921s 99% 0.488921s Write: 10% 0.494945s 30% 0.494945s 50% 0.494945s 90% 0.494945s 99% 0.494945s Write: 10% 0.482712s 30% 0.482712s 50% 0.482712s 90% 0.482712s 99% 0.482712s Write: 10% 0.524338s 30% 0.524338s 50% 0.524338s 90% 0.524338s 99% 0.524338s Write: 10% 0.526143s 30% 0.526143s 50% 0.526143s 90% 0.526143s 99% 0.526143s Write: 10% 0.527148s 30% 0.527148s 50% 0.527148s 90% 0.527148s 99% 0.527148s Write: 10% 0.537759s 30% 0.537759s 50% 0.537759s 90% 0.537759s 99% 0.537759s Write: 10% 0.512273s 30% 0.512273s 50% 0.512273s 90% 0.512273s 99% 0.512273s Write: 10% 0.545606s 30% 0.545606s 50% 0.545606s 90% 0.545606s 99% 0.545606s Write: 10% 0.550163s 30% 0.550163s 50% 0.550163s 90% 0.550163s 99% 0.550163s Read: 10% 1.769570s 30% 1.769570s 50% 1.769570s 90% 1.769570s 99% 1.769570s Step 3. 
write modify Write: 10% 0.374944s 30% 0.374944s 50% 0.374944s 90% 0.374944s 99% Write: 10% 0.342619s 30% 0.342619s 50% 0.342619s 90% 0.342619s 99% 0.342619s Write: 10% 0.374944s 0.251850s 30% 0.251850s 50% 0.251850s 90% 0.251850s 99% 0.251850s Write: 10% 0.389366s 30% 0.389366s 50% 0.389366s 90% 0.389366s 99% 0.389366s Write: 10% 0.262532s 30% 0.262532s 50% 0.262532s 90% 0.262532s 99% 0.262532s Write: 10% 0.301229s 30% 0.301229s 50% 0.301229s 90% 0.301229s 99% 0.301229s Write: 10% 0.452092s 30% 0.452092s 50% 0.452092s 90% 0.452092s 99% 0.452092s Write: 10% 0.231827s 30% 0.231827s 50% 0.231827s 90% 0.231827s 99% 0.231827s Write: 10% 0.440489s 30% 0.440489s 50% 0.440489s 90% 0.440489s 99% 0.440489s Write: 10% 0.250531s 30% 0.250531s 50% Write: 10% 0.171698s 30% 0.171698s 50% 0.171698s 90% 0.171698s 99% 0.171698s 0.250531s 90% 0.250531s 99% 0.250531sWrite: 10% 0.627238s 30% 0.627238s 50% 0.627238s 90% 0.627238s 99% 0.627238sWrite: 10% 0.487149s 30% 0.487149s 50% 0.487149s 90% 0.487149s 99% 0.487149s Write: 10% 0.177256s 30% 0.177256s 50% 0.177256s 90% 0.177256s 99% 0.177256s Write: 10% 0.307238s 30% 0.307238s 50% 0.307238s 90% 0.307238s 99% 0.307238s Write: 10% 0.399999s 30% 0.399999s 50% 0.399999s 90% 0.399999s 99% 0.399999s Write: 10% 0.657636s 30% 0.657636s 50% 0.657636s 90% 0.657636s 99% 0.657636sWrite: 10% 0.401613s 30% 0.401613s 50% 0.401613s 90% 0.401613s 99% 0.401613s Write: 10% 0.411843s 30% 0.411843s 50% 0.411843s 90% 0.411843s 99% 0.411843sWrite: 10% 0.203290s 30% 0.203290s 50% 0.203290s 90% 0.203290s 99% 0.203290s Write: 10% 0.338920s 30% 0.338920s 50% Write: 10% 0.188462s 30% 0.188462s 50% 0.188462s 90% 0.188462s 99% 0.188462s Write: 10% 0.260229s 30% 0.260229s 50% Write0.260229s 90% 0.260229s 99% 0.260229s: 10% 0.396136s 30% 0.396136s 50% 0.396136s 90% 0.396136s 99% 0.396136s Write: 10% 0.265511s 30% 0.265511s 50% Write: 10% 0.397300s 30% 0.397300s 50% 0.338920s 90% 0.338920s 99% 0.338920s0.397300s 90% 0.397300s 99% 0.397300s 0.265511s 90% 0.265511s 99% 0.265511sWrite: 10% 0.225058s 30% 0.225058s 50% 0.225058s 90% 0.225058s 99% 0.225058s Write: 10% 0.287355s 30% 0.287355s 50% 0.287355s 90% 0.287355s 99% 0.287355s Write: 10% 0.346050s 30% 0.346050s 50% 0.346050s 90% 0.346050s 99% 0.346050s Write: 10% 0.421820s 30% 0.421820s 50% 0.421820s 90% 0.421820s 99% 0.421820s Write: 10% 0.365598s 30% 0.365598s 50% 0.365598s 90% 0.365598s 99% 0.365598s Write: 10% 0.548541s 30% 0.548541s 50% 0.548541s 90% 0.548541s 99% 0.548541s Write: 10% 0.366240s 30% 0.366240s 50% 0.366240s 90% 0.366240s 99% 0.366240s Write: 10% 0.391950s 30% 0.391950s 50% 0.391950s 90% 0.391950s 99% 0.391950s Write: 10% 0.351020s 30% 0.351020s 50% 0.351020s 90% 0.351020s 99% 0.351020s Write: 10% 0.351676s 30% 0.351676s 50% 0.351676s 90% 0.351676s 99% 0.351676s Write: 10% 0.435977s 30% 0.435977s 50% 0.435977s 90% 0.435977s 99% 0.435977s Write: 10% 0.441462s 30% 0.441462s 50% 0.441462s 90% 0.441462s 99% 0.441462s Write: 10% 0.464819s 30% 0.464819s 50% 0.464819s 90% 0.464819s 99% 0.464819s Write: 10% 0.505460s 30% 0.505460s 50% 0.505460s 90% 0.505460s 99% 0.505460s Write: 10% 0.326378s 30% 0.326378s 50% 0.326378s 90% 0.326378s 99% 0.326378s Write: 10% 0.327563s 30% 0.327563s 50% 0.327563s 90% 0.327563s 99% 0.327563s Write: 10% 0.353318s 30% 0.353318s 50% 0.353318s 90% 0.353318s 99% 0.353318s Write: 10% 0.433339s 30% 0.433339s 50% 0.433339s 90% 0.433339s 99% 0.433339s Write: 10% 0.499613s 30% 0.499613s 50% 0.499613s 90% 0.499613s 99% 0.499613s Write: 10% 0.461097s 30% 0.461097s 50% 0.461097s 90% 0.461097s 99% 0.461097s Write: 
10% 0.522063s 30% 0.522063s 50% 0.522063s 90% 0.522063s 99% 0.522063s Write: 10% 0.362258s 30% 0.362258s 50% 0.362258s 90% 0.362258s 99% 0.362258s Write: 10% 0.462520s 30% 0.462520s 50% 0.462520s 90% 0.462520s 99% 0.462520s Write: 10% 0.348273s 30% 0.348273s 50% 0.348273s 90% 0.348273s 99% 0.348273s Write: 10% 0.375419s 30% 0.375419s 50% 0.375419s 90% 0.375419s 99% 0.375419s Write: 10% 0.446238s 30% 0.446238s 50% 0.446238s 90% 0.446238s 99% 0.446238s Write: 10% 0.479786s 30% 0.479786s 50% 0.479786s 90% 0.479786s 99% 0.479786s Write: 10% 0.562082s 30% 0.562082s 50% 0.562082s 90% 0.562082s 99% 0.562082s Write: 10% 0.566740s 30% 0.566740s 50% 0.566740s 90% 0.566740s 99% 0.566740s Write: 10% 0.432043s 30% 0.432043s 50% 0.432043s 90% 0.432043s 99% 0.432043s Write: 10% 0.459189s 30% 0.459189s 50% 0.459189s 90% 0.459189s 99% 0.459189s Write: 10% 0.447598s 30% 0.447598s 50% 0.447598s 90% 0.447598s 99% 0.447598s Write: 10% 0.540427s 30% 0.540427s 50% 0.540427s 90% 0.540427s 99% 0.540427s Write: 10% 0.446380s 30% 0.446380s 50% 0.446380s 90% 0.446380s 99% 0.446380s Write: 10% 0.445465s 30% 0.445465s 50% 0.445465s 90% 0.445465s 99% 0.445465s Write: 10% 0.482563s 30% 0.482563s 50% 0.482563s 90% 0.482563s 99% 0.482563s Write: 10% 0.449844s 30% 0.449844s 50% 0.449844s 90% 0.449844s 99% 0.449844s Write: 10% 0.513714s 30% 0.513714s 50% 0.513714s 90% 0.513714s 99% 0.513714s Update: 10% 0.577118s 30% 0.577118s 50% 0.577118s 90% 0.577118s 99% 0.577118s Step 4. read modify write Write: 10% 0.500652s 30% 0.500652s 50% 0.500652s 90% 0.500652s 99% 0.500652s Write: 10% 0.396803s 30% 0.396803s 50% 0.396803s 90% 0.396803s 99% 0.396803s Write: 10% 0.398647s 30% 0.398647s 50% 0.398647s 90% 0.398647s 99% 0.398647s Write: 10% 0.647864s 30% 0.647864s 50% 0.647864s 90% 0.647864s 99% 0.647864s Write: 10% 1.215419s 30% 1.215419s 50% 1.215419s 90% 1.215419s 99% 1.215419s Write: 10% 2.127876s 30% 2.127876s 50% 2.127876s 90% 2.127876s 99% 2.127876s Write: 10% 2.233797s 30% 2.233797s 50% 2.233797s 90% 2.233797s 99% 2.233797s Write: 10% 3.038898s 30% 3.038898s 50% 3.038898s 90% 3.038898s 99% 3.038898s Was written: 87 MiB, Speed: 8 MiB/s Write: 10% 8.373868s 30% 8.373868s 50% 8.373868s 90% 8.373868s 99% 8.373868s Write: 10% 8.620524s 30% 8.620524s 50% 8.620524s 90% 8.620524s 99% 8.620524s Write: 10% 8.885584s 30% 8.885584s 50% 8.885584s 90% 8.885584s 99% 8.885584s Write: 10% 8.988305s 30% 8.988305s 50% 8.988305s 90% 8.988305s 99% 8.988305s Write: 10% 9.143407s 30% 9.143407s 50% 9.143407s 90% 9.143407s 99% 9.143407s Write: 10% 9.298544s 30% 9.298544s 50% 9.298544s 90% 9.298544s 99% 9.298544s Write: 10% 9.582034s 30% 9.582034s 50% 9.582034s 90% 9.582034s 99% 9.582034s Write: 10% 9.638768s 30% 9.638768s 50% 9.638768s 90% 9.638768s 99% 9.638768s Write: 10% 9.753609s 30% 9.753609s 50% 9.753609s 90% 9.753609s 99% 9.753609s Write: 10% 9.654854s 30% 9.654854s 50% 9.654854s 90% 9.654854s 99% 9.654854s Write: 10% 9.887457s 30% 9.887457s 50% 9.887457s 90% 9.887457s 99% 9.887457s Write: 10% 9.925327s 30% 9.925327s 50% 9.925327s 90% 9.925327s 99% 9.925327s Write: 10% 9.771148s 30% 9.771148s 50% 9.771148s 90% 9.771148s 99% 9.771148s Write: 10% 9.883227s 30% 9.883227s 50% 9.883227s 90% 9.883227s 99% 9.883227s Write: 10% 10.214350s 30% 10.214350s 50% 10.214350s 90% 10.214350s 99% 10.214350s Write: 10% 10.183375s 30% 10.183375s 50% 10.183375s 90% 10.183375s 99% 10.183375s Write: 10% 10.434931s 30% 10.434931s 50% 10.434931s 90% 10.434931s 99% 10.434931s Write: 10% 10.454253s 30% 10.454253s 50% 10.454253s 90% 10.454253s 99% 10.454253s Write: 10% 
10.522467s 30% 10.522467s 50% 10.522467s 90% 10.522467s 99% 10.522467s Write: 10% 10.777879s 30% 10.777879s 50% 10.777879s 90% 10.777879s 99% 10.777879s Write: 10% 10.729647s 30% 10.729647s 50% 10.729647s 90% 10.729647s 99% 10.729647s Write: 10% 10.694740s 30% 10.694740s 50% 10.694740s 90% 10.694740s 99% 10.694740s Write: 10% 10.753367s 30% 10.753367s 50% 10.753367s 90% 10.753367s 99% 10.753367s Write: 10% 10.758497s 30% 10.758497s 50% 10.758497s 90% 10.758497s 99% 10.758497s Write: 10% 10.877269s 30% 10.877269s 50% 10.877269s 90% 10.877269s 99% 10.877269s Write: 10% 10.898041s 30% 10.898041s 50% 10.898041s 90% 10.898041s 99% 10.898041s Write: 10% 10.789426s 30% 10.789426s 50% 10.789426s 90% 10.789426s 99% 10.789426s Write: 10% 10.793710s 30% 10.793710s 50% 10.793710s 90% 10.793710s 99% 10.793710s Write: 10% 10.834159s 30% 10.834159s 50% 10.834159s 90% 10.834159s 99% 10.834159s Write: 10% 10.823456s 30% 10.823456s 50% 10.823456s 90% 10.823456s 99% 10.823456s Write: 10% 10.823396s 30% 10.823396s 50% 10.823396s 90% 10.823396s 99% 10.823396s Write: 10% 10.837398s 30% 10.837398s 50% 10.837398s 90% 10.837398s 99% 10.837398s Write: 10% 10.848926s 30% 10.848926s 50% 10.848926s 90% 10.848926s 99% 10.848926s Write: 10% 10.831281s 30% 10.831281s 50% 10.831281s 90% 10.831281s 99% 10.831281s Write: 10% 10.605467s 30% 10.605467s 50% 10.605467s 90% 10.605467s 99% 10.605467s Write: 10% 10.889847s 30% 10.889847s 50% 10.889847s 90% 10.889847s 99% 10.889847s Write: 10% 10.846667s 30% 10.846667s 50% 10.846667s 90% 10.846667s 99% 10.846667s Write: 10% 11.543824s 30% 11.543824s 50% 11.543824s 90% 11.543824s 99% 11.543824s Write: 10% 11.653336s 30% 11.653336s 50% 11.653336s 90% 11.653336s 99% 11.653336s Write: 10% 11.555827s 30% 11.555827s 50% 11.555827s 90% 11.555827s 99% 11.555827s Write: 10% 11.766852s 30% 11.766852s 50% 11.766852s 90% 11.766852s 99% 11.766852s Write: 10% 11.567123s 30% 11.567123s 50% 11.567123s 90% 11.567123s 99% 11.567123s Write: 10% 11.757528s 30% 11.757528s 50% 11.757528s 90% 11.757528s 99% 11.757528s Write: 10% 11.570801s 30% 11.570801s 50% 11.570801s 90% 11.570801s 99% 11.570801s Write: 10% 11.684186s 30% 11.684186s 50% 11.684186s 90% 11.684186s 99% 11.684186s Write: 10% 11.830918s 30% 11.830918s 50% 11.830918s 90% 11.830918s 99% 11.830918s Write: 10% 11.960301s 30% 11.960301s 50% 11.960301s 90% 11.960301s 99% 11.960301s Write: 10% 11.805526s 30% 11.805526s 50% 11.805526s 90% 11.805526s 99% 11.805526s Write: 10% 11.719547s 30% 11.719547s 50% 11.719547s 90% 11.719547s 99% 11.719547s Write: 10% 11.799131s 30% 11.799131s 50% 11.799131s 90% 11.799131s 99% 11.799131s Write: 10% 11.978947s 30% 11.978947s 50% 11.978947s 90% 11.978947s 99% 11.978947s Write: 10% 11.875552s 30% 11.875552s 50% 11.875552s 90% 11.875552s 99% 11.875552s Write: 10% 11.850115s 30% 11.850115s 50% 11.850115s 90% 11.850115s 99% 11.850115s Write: 10% 12.160688s 30% 12.160688s 50% 12.160688s 90% 12.160688s 99% 12.160688s Write: 10% 11.994689s 30% 11.994689s 50% 11.994689s 90% 11.994689s 99% 11.994689s Write: 10% 12.137693s 30% 12.137693s 50% 12.137693s 90% 12.137693s 99% 12.137693s Update: 10% 12.689638s 30% 12.689638s 50% 12.689638s 90% 12.689638s 99% 12.689638s Read: 10% 1.146375s 30% 1.474474s 50% 2.327065s 90% 3.984566s 99% 3.984566s |99.1%| [TM] {RESULT} ydb/tests/olap/high_load/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/high_load/unittest >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/hive/py3test >> TMLPConsumerTests::HtmlApp_BadPartition [GOOD] >> TMLPCountersTests::SimpleCounters |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_vacuum/unittest >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:52.695655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:53.006111Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-07T18:09:53.026288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:53.113676Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:316:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:53.114528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:53.114643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:54.107784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:54.108119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:54.345969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:36:2083] 1767809389186553 != 1767809389186557 2026-01-07T18:09:54.397826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:54.474422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:54.662956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:55.298952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:55.316283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:55.493798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:56.216824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:986:2843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:56.216963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:56.217071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:56.226230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1002:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:56.226366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:56.253944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:09:56.412156Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1000:2851], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:09:56.589337Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1058:2890] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-07T18:10:05.538935Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:05.542538Z node 2 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-07T18:10:05.543676Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:05.546828Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:05.547101Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:05.547224Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:05.783134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:05.783269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:05.807683Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:36:2083] 1767809402061579 != 1767809402061583 2026-01-07T18:10:05.811650Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:05.858270Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:05.947652Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:06.291021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:06.305545Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:06.413848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:06.791739Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:979:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:06.791854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:990:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:06.791933Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:06.797326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:06.797606Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:06.802418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:10:06.913926Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:993:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-07T18:10:06.950060Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1050:2883] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-07T18:10:12.226560Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cach ... message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-07T18:10:36.958064Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:36.988895Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-07T18:10:36.992771Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:10:36.995348Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:36.995776Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:36.995890Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:37.503269Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:37.503446Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:37.583950Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:36:2083] 1767809432649362 != 1767809432649365 2026-01-07T18:10:37.598507Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:37.688790Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:37.778673Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:38.099631Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:38.117492Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:38.243030Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:39.036578Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:39.036738Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1086:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:39.036834Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:39.037952Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:39.038136Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:39.043667Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:10:39.203851Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-07T18:10:39.242102Z node 6 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [6:1149:2963] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-07T18:10:45.558330Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:10:45.575149Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-07T18:10:45.581812Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:10:45.582035Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:10:45.582235Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:10:45.863597Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:45.863755Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:45.895631Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:36:2083] 1767809441812791 != 1767809441812795 2026-01-07T18:10:45.903399Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:45.954329Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:46.043329Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:10:46.366785Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:46.381401Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:46.493002Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:47.086486Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:47.086597Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1087:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:47.086961Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:47.088020Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:47.088125Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:47.093800Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:10:47.245358Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T18:10:47.283363Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:1149:2963] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:10:48.706940Z node 7 :TX_DATASHARD WARN: datashard__vacuum.cpp:37: Vacuum of tablet# 72075186224037888: has borrowed parts, requested from [7:832:2727] |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_vacuum/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_vacuum/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/common/py3test |99.1%| [TM] {RESULT} ydb/tests/fq/common/py3test >> TopicSessionTests::WrongJson [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:22.605140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:22.853070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:22.922235Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:22.922898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:22.922956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:23.655336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:23.655463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:23.751498Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809358864348 != 1767809358864352 2026-01-07T18:09:23.776510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:23.828890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:23.982645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:24.405641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:24.422258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:24.588274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:24.664622Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2774] 2026-01-07T18:09:24.664883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:24.719168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:24.719343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:24.722504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:24.722594Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:24.722667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:24.732973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:24.733382Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:24.733476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-07T18:09:24.734768Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-07T18:09:24.734954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:24.743726Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:910:2779] 2026-01-07T18:09:24.743955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:24.754299Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:24.754451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:24.755734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-07T18:09:24.755817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-07T18:09:24.755861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-07T18:09:24.756156Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:24.756286Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:24.756340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:961:2776] in generation 1 2026-01-07T18:09:24.756856Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:916:2783] 2026-01-07T18:09:24.757250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:24.765831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:24.765976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:24.767276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-07T18:09:24.767339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-07T18:09:24.767381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-07T18:09:24.767653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:24.767801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:24.767868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2779] in generation 1 2026-01-07T18:09:24.768333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:24.768429Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:24.769777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-07T18:09:24.769841Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-07T18:09:24.769876Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-07T18:09:24.770115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:24.770204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:24.770253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-07T18:09:24.781394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:24.817063Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:09:24.818700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:24.818879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-07T18:09:24.818918Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:24.818951Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:09:24.818986Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:24.820581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:24.820633Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-07T18:09:24.820714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:24.820783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-07T18:09:24.820807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T18:09:24.820850Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-07T18:09:24.820880Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:09:24.820966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:24.820995Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-07T18:09:24.821056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:24.821134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-07T18:09:24.821154Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-07T18:09:24.821171Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-07T18:09:24.821197Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-07T18:09:24.821399Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:09:24.821510Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:24.821633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:24.821673Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-07T18:09:24.821719Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... RD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:10:51.784757Z node 6 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [6:1189:2935] TxId: 281474976715664. Ctx: { TraceId: 01kectf1mx525a0eeykb38feq2, Database: , SessionId: ydb://session/3?node_id=6&id=YTBmY2ZkYmEtNzg0ODUyNGQtMzRjMWQ1YjYtMmRmMGY3ZTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-07T18:10:51.785254Z node 6 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=6&id=YTBmY2ZkYmEtNzg0ODUyNGQtMzRjMWQ1YjYtMmRmMGY3ZTQ=, ActorId: [6:1133:2935], ActorState: ExecuteState, LegacyTraceId: 01kectf1mx525a0eeykb38feq2, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-07T18:10:51.785814Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [6:68:2115] Handle TEvExecuteKqpTransaction 2026-01-07T18:10:51.785860Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [6:68:2115] TxId# 281474976715665 ProcessProposeKqpTransaction 2026-01-07T18:10:51.786110Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [6:1204:2988], Recipient [6:971:2825]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:10:51.786148Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:10:51.786185Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [6:1203:2987], serverId# [6:1204:2988], sessionId# [0:0:0] ... generic readset: Decision: DECISION_ABORT 2026-01-07T18:10:51.786463Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [6:886:2765], Recipient [6:971:2825]: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T18:10:51.786497Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-07T18:10:51.786530Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2026-01-07T18:10:51.786592Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-07T18:10:51.786886Z node 6 :TX_DATASHARD TRACE: volatile_tx.cpp:884: Processed readset with decision 2 from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2026-01-07T18:10:51.787010Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-07T18:10:51.787584Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [6:1188:2935], Recipient [6:886:2765]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-07T18:10:51.787613Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-07T18:10:51.787785Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [6:886:2765], Recipient [6:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 
2026-01-07T18:10:51.787820Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-07T18:10:51.787879Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-07T18:10:51.787974Z node 6 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-07T18:10:51.788058Z node 6 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-07T18:10:51.788131Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-07T18:10:51.788175Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T18:10:51.788210Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-07T18:10:51.788252Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T18:10:51.788279Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T18:10:51.788313Z node 6 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3001/0 ImmediateWriteEdgeReplied# v3001/0 2026-01-07T18:10:51.788362Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-07T18:10:51.788400Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T18:10:51.788419Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T18:10:51.788433Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-07T18:10:51.788450Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-07T18:10:51.788468Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T18:10:51.788482Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-07T18:10:51.788496Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-07T18:10:51.788510Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-07T18:10:51.788533Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-07T18:10:51.788606Z node 6 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 
1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-07T18:10:51.788647Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-07T18:10:51.788702Z node 6 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-07T18:10:51.788770Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T18:10:51.788795Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-07T18:10:51.788831Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-07T18:10:51.788865Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T18:10:51.788890Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-07T18:10:51.788913Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-07T18:10:51.788968Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T18:10:51.789000Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-07T18:10:51.789037Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-07T18:10:51.789052Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T18:10:51.789070Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-07T18:10:51.789117Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-07T18:10:51.789150Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-07T18:10:51.789187Z node 6 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-07T18:10:51.789265Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:10:51.789640Z node 6 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715664; 2026-01-07T18:10:51.789805Z node 6 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3002 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-07T18:10:51.789863Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:10:51.789961Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [6:971:2825], Recipient [6:886:2765]: 
{TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-07T18:10:51.789998Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-07T18:10:51.790053Z node 6 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2026-01-07T18:10:51.791193Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [6:70:2117], Recipient [6:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_rs/unittest |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> TopicSessionTests::WrongJsonOffset |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DescribeSchemaSecretsService::MixedGrantsInBatch [GOOD] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_drain.py::TestHive::test_drain_on_stop |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TTxDataShardFilterKMeansScan::BadRequest >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::TestViewer::test_viewer_peers [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/viewer/tests/py3test |99.1%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> TIndexProcesorTests::TestOver1000Queues [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2026-01-07T18:09:55.095304Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683541103883120:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:55.095465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:09:55.628908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:55.629023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:55.754737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:55.771412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683541103882900:2081] 1767809395035424 != 1767809395035427 2026-01-07T18:09:55.790565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:55.792093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:55.956062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:09:56.081646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:09:56.081677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:09:56.081683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:09:56.081781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:09:56.096508Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:56.655122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:57.201403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-07T18:10:00.102498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683541103883120:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:10:00.102583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:10:00.759799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:00.762177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:01.414195Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:01.500744Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683566873687815:2671] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T18:10:01.714829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2026-01-07T18:10:01.742047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:01.761268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1767809401520, "myFolder", "{\"k1\": \"v1\"}"); 2026-01-07T18:10:01.953041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683566873687983:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:01.953247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:01.955161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683566873687992:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:01.955231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:01.955910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683566873687998:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:01.978008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:10:01.997650Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592683566873688000:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2026-01-07T18:10:02.072853Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683571168655347:2824] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 52], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:01.520000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:01.520000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:06.044703Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-07T18:10:06.838312Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683588348524818:2935] txid# 281474976710684, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/SingleCreateQueueEvent\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/SingleCreateQueueEvent', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T18:10:06.874993Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683588348524828:2940] txid# 281474976710685, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:10:06.877024Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683588348524829:2941] txid# 281474976710686, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 48], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1767809406648, "myFolder", NULL); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:06.648000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:06.648000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:07.214940Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2026-01-07T18:10:09.506341Z node 
1 : ... tributes":{"labels":{"k1": "v1"}}} 2026-01-07T18:10:15.712868Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683627003231346:3409] txid# 281474976710725, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/DeletedQueueNotReindexed\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/DeletedQueueNotReindexed', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T18:10:15.754493Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683627003231355:3414] txid# 281474976710726, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 59], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:10:15.768629Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683627003231362:3419] txid# 281474976710727, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 60], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "queue2", "myQueueCustomName", 1767809414000, "myFolder", NULL); ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue2", 0, "myQueueCustomName", 1767809415332, "myFolder", NULL); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:15.332000Z","resource_id":"queue2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-07T18:10:15.332000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2026-01-07T18:10:17.634493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710734:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:17.649682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710734, at schemeshard: 72057594046644480 2026-01-07T18:10:17.656393Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683635593166080:3482] txid# 281474976710735, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T18:10:17.939786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710736, at schemeshard: 72057594046644480 2026-01-07T18:10:17.979521Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710737:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:17.993288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710738:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing1", "myQueueCustomName", 1767809416000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing2", "myQueueCustomName", 1767809416000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing3", "myQueueCustomName", 1767809416000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting1", "myQueueCustomName", 1767809416000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting2", "myQueueCustomName", 1767809416000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting1", 0, "myQueueCustomName", 1767809417689, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting2", 0, "myQueueCustomName", 1767809417689, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating1", 1, "myQueueCustomName", 1767809417689, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating2", 1, "myQueueCustomName", 1767809417689, "myFolder", "{\"k1\": \"v1\"}"); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-07T18:10:17.689000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-07T18:10:17.689000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:16.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:21.724076Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:16.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:21.724145Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:21.724176Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:16.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:21.724207Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-07T18:10:17.689000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-07T18:10:21.724255Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-07T18:10:22.576216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710763:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:22.586986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710763, at schemeshard: 72057594046644480 2026-01-07T18:10:22.595182Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683657068003139:3768] txid# 281474976710764, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-07T18:10:22.954936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710766:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:10:22.963645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710767:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Started add queue batch |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/ymq/actor/yc_search_ut/unittest |99.1%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardFilterKMeansScan::BadRequest [GOOD] >> TTxDataShardFilterKMeansScan::BuildToPosting >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> TopicSessionTests::WrongJsonOffset [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::OneClientOneSession |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> RowDispatcherTests::OneClientOneSession [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/postgresql/py3test |99.1%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test >> RowDispatcherTests::TwoClientOneSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2026-01-07T18:07:53.069405Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-07T18:07:53.077170Z node 1 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.085456Z node 1 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.085538Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.132338Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2026-01-07T18:07:53.132470Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.132646Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.132719Z node 2 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-07T18:07:53.132752Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.137450Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.160639Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-07T18:07:53.160769Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.161278Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.161360Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.161643Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.161754Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.161783Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.161995Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.162043Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.162064Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.162247Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.162307Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.162331Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.162485Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.162541Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-07T18:07:53.162568Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.162840Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.162914Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-07T18:07:53.162947Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.163876Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.184601Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2026-01-07T18:07:53.184732Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.185045Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "/resource" 2026-01-07T18:07:53.185106Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2026-01-07T18:07:53.185149Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2026-01-07T18:07:53.185428Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.185548Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "resource//resource" 2026-01-07T18:07:53.185630Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2026-01-07T18:07:53.185693Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2026-01-07T18:07:53.235371Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-07T18:07:53.235503Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.235804Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res" 2026-01-07T18:07:53.241828Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.305186Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2026-01-07T18:07:53.305291Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res" 2026-01-07T18:07:53.305353Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res", 42) 2026-01-07T18:07:53.305449Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2026-01-07T18:07:53.325124Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2026-01-07T18:07:53.325280Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-07T18:07:53.325652Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res0" 2026-01-07T18:07:53.325819Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.326117Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-07T18:07:53.326169Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res0" 2026-01-07T18:07:53.326230Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res0", 42) 2026-01-07T18:07:53.326302Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2026-01-07T18:07:53.326424Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res1" 2026-01-07T18:07:53.326523Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res1" 2026-01-07T18:07:53.326753Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-07T18:07:53.326788Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res1" 2026-01-07T18:07:53.326827Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res1", 43) 2026-01-07T18:07:53.326875Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-07T18:07:53.326986Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res2" 2026-01-07T18:07:53.327082Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res2" 2026-01-07T18:07:53.327323Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-07T18:07:53.327378Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res2" 2026-01-07T18:07:53.327417Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res2", 44) 2026-01-07T18:07:53.327495Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2026-01-07T18:07:53.327749Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2026-01-07T18:07:53.327803Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2026-01-07T18:07:53.327837Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:662: [/Path/KesusName]: Activate session to "res1". 
Connected: 1 2026-01-07T18:07:53.328659Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:585: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 25769805828 } }) 2026-01-07T18:07:53.328740Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-07T18:07:53.333271Z node 6 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-07T18:07:53.333335Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-07T18:07:53.333506Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:640: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2026-01-07T18:07:53.333570Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:614: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2026-01-07T18:07:53.333795Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-07T18:07:53.334127Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: ... RACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-07T18:10:29.301150Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.301236Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.325853Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-07T18:10:29.325899Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-07T18:10:29.425000Z 2026-01-07T18:10:29.325918Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-07T18:10:29.328447Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-07T18:10:29.328503Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000100561, QueueWeight: 5 } 2026-01-07T18:10:29.328556Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.329117Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.403928Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7592683682837918229:2291]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-07T18:10:29.407848Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-07T18:10:29.407906Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-07T18:10:29.407973Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.409730Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.425753Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-07T18:10:29.425809Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-07T18:10:29.525000Z 2026-01-07T18:10:29.425827Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-07T18:10:29.432566Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-07T18:10:29.432609Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000100561, QueueWeight: 5 } 2026-01-07T18:10:29.432661Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.432773Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.500675Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7592683682837918229:2291]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-07T18:10:29.501514Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-07T18:10:29.501577Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-07T18:10:29.501639Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.501930Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.527335Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-07T18:10:29.527381Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2026-01-07T18:10:29.625000Z 2026-01-07T18:10:29.527411Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-07T18:10:29.530006Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-07T18:10:29.530055Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001005612349, QueueWeight: 5 } 2026-01-07T18:10:29.530101Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.530558Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.601689Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7592683682837918229:2291]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-07T18:10:29.602291Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-07T18:10:29.602336Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-07T18:10:29.602401Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998994388, 2)} }]) 2026-01-07T18:10:29.604171Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.628495Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0.9998994388. FreeBalance: 0.9998994388 2026-01-07T18:10:29.628552Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-07T18:10:29.725000Z 2026-01-07T18:10:29.628568Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-07T18:10:29.628635Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:275: Charge "Resource" for 5. Balance: 0.9998994388. FreeBalance: 0.9998994388. TicksToFullfill: 5.000502857. DurationToFullfillInUs: 500050.2857. TimeToFullfill: 2026-01-07T18:10:29.222170Z. Now: 2026-01-07T18:10:29.625957Z. LastAllocated: 2026-01-07T18:10:28.722120Z 2026-01-07T18:10:29.630179Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2026-01-07T18:10:29.630203Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100561, QueueWeight: 0 } 2026-01-07T18:10:29.630238Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.630386Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.701736Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7592683682837918229:2291]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-07T18:10:29.710073Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-07T18:10:29.710130Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-07T18:10:29.710178Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-07T18:10:29.711934Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-07T18:10:29.726213Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-07T18:10:29.957123Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:29.957159Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:30.955304Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:30.955342Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:31.956317Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:31.956350Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:32.428885Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[50:7592683678542949776:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:10:32.429014Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:10:32.605675Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7592683678542950037:2132] 2026-01-07T18:10:32.605705Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7592683678542950037:2132] 2026-01-07T18:10:32.958025Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:32.958062Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:32.985884Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7592683678542950037:2132] 2026-01-07T18:10:32.985921Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7592683678542950037:2132] 2026-01-07T18:10:33.956818Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:33.956855Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:34.956826Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: 
TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:34.956864Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] 2026-01-07T18:10:35.958515Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7592683678542950426:2388] 2026-01-07T18:10:35.958552Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7592683678542950426:2388] |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/quoter/ut/unittest >> RowDispatcherTests::TwoClientOneSession [GOOD] |99.1%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> RowDispatcherTests::HandleTEvUndelivered |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> RowDispatcherTests::SessionFatalError |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::SessionFatalError [GOOD] >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> 
test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropSameViewTwice >> TMLPCountersTests::SimpleCounters [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_ShortMessage >> DescribeSchemaSecretsService::SchemeCacheRetryErrors [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> TTxDataShardFilterKMeansScan::BuildToPosting [GOOD] >> TTxDataShardFilterKMeansScan::BuildToBuild |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::SessionFatalError [GOOD] Test command err: 2026-01-07T18:08:16.445032Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [1:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-07T18:08:16.457836Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-07T18:08:16.457921Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [1:25:2054] 2026-01-07T18:08:16.457963Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [1:25:2054] 2026-01-07T18:08:16.457992Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [1:25:2054] 2026-01-07T18:08:16.458045Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:26:2054] 2026-01-07T18:08:16.458065Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:26:2054] 2026-01-07T18:08:16.458097Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:16.458123Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:26:2054] 2026-01-07T18:08:16.458156Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [3:27:2054] 2026-01-07T18:08:16.458184Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [3:27:2054] 
2026-01-07T18:08:16.458207Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:16.458226Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-07T18:08:16.458252Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [3:27:2054] 2026-01-07T18:08:16.458348Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-07T18:08:16.458562Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-07T18:08:16.458693Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2026-01-07T18:08:16.458742Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-07T18:08:16.497932Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-07T18:08:16.498099Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-07T18:08:16.498282Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:32:2055] 2026-01-07T18:08:16.498327Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:32:2055] 2026-01-07T18:08:16.498370Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:32:2055] 2026-01-07T18:08:16.498410Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:32:2055] 2026-01-07T18:08:16.498466Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:33:2056] 2026-01-07T18:08:16.498499Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:33:2056] 2026-01-07T18:08:16.498534Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:32:2055] to new [2:33:2056] 2026-01-07T18:08:16.498562Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:33:2056] 2026-01-07T18:08:16.498632Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-07T18:08:16.498681Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-07T18:08:16.498799Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-07T18:08:16.498889Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-07T18:08:16.677528Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [5:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-07T18:08:16.677796Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-07T18:08:16.677856Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [5:25:2054] 2026-01-07T18:08:16.677890Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [5:25:2054] 
2026-01-07T18:08:16.677923Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [5:25:2054] 2026-01-07T18:08:16.677978Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [6:26:2054] 2026-01-07T18:08:16.678000Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [6:26:2054] 2026-01-07T18:08:16.678029Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:16.678055Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [6:26:2054] 2026-01-07T18:08:16.678082Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [7:27:2054] 2026-01-07T18:08:16.678139Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [7:27:2054] 2026-01-07T18:08:16.678171Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:16.678197Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-07T18:08:16.678221Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [7:27:2054] 2026-01-07T18:08:16.678313Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2026-01-07T18:08:16.678436Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2026-01-07T18:08:16.678558Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2026-01-07T18:08:16.678618Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2026-01-07T18:08:16.874984Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [9:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-07T18:08:16.875457Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-07T18:08:16.875510Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [10:26:2054] 2026-01-07T18:08:16.875540Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [10:26:2054] 2026-01-07T18:08:16.875562Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:16.875600Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [10:26:2054] 2026-01-07T18:08:16.875690Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [9:28:2055], topic1, partIds: 0 2026-01-07T18:08:16.875761Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:514: Coordinator: Not all nodes connected, nodes count: 3, known rd count: 2, add request into pending queue 2026-01-07T18:08:17.879285Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [11:27:2054] 2026-01-07T18:08:17.879383Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [11:27:2054] 2026-01-07T18:08:17.879418Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:17.879468Z node 9 :FQ_ROW_DISPATCHER TRACE: 
coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-07T18:08:17.879588Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [9:28:2055] 2026-01-07T18:08:17.879656Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [11:27:2054] 2026-01-07T18:08:18.062326Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [13:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-07T18:08:18.062844Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-07T18:08:18.066212Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [13:25:2054] 2026-01-07T18:08:18.066285Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [13:25:2054] 2026-01-07T18:08:18.066334Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [13:25:2054] 2026-01-07T18:08:18.066394Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [14:26:2054] 2026-01-07T18:08:18.066414Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [14:26:2054] 2026-01-07T18:08:18.066442Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:18.066483Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [14:26:2054] 2026-01-07T18:08:18.066520Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [15:27:2054] 2026-01-07T18:08:18.066540Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [15:27:2054] 2026-01-07T18:08:18.066556Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:08:18.066574Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-07T18:08:18.066600Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [15:27:2054] 2026-01-07T18:08:18.066706Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-07T18:08:18.066844Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-07T18:08:18.067024Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-07T18:08:18.067174Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-07T18:08:18.206277Z node 17 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [17:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-07T18:08:18.206607Z node 17 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 1, AssignedNodes: 0 2026-01-07T18:08:18.206652Z node 17 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-07T18:08:18.206690Z ... 
topic part id 100 query id QueryId cookie 42 2026-01-07T18:11:09.641342Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-07T18:11:09.641914Z node 60 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: C-ares status is not ARES_SUCCESS qtype=A name=YDB_ENDPOINT is_balancer=0: DNS server returned general failure } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2026-01-07T18:11:09.653244Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1052: RowDispatcher: TEvTryConnect to node id 61 2026-01-07T18:11:09.653582Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:587: RowDispatcher: EvNodeConnected, node id 61 2026-01-07T18:11:09.654250Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-07T18:11:09.654608Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-07T18:11:09.654733Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-07T18:11:09.655029Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 41 2026-01-07T18:11:09.655126Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-07T18:11:09.655413Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-07T18:11:09.655502Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-07T18:11:09.655793Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 42 2026-01-07T18:11:09.655855Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [61:16:2053] query id QueryId, partitions size 1 2026-01-07T18:11:09.655974Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [60:22:2063] 2026-01-07T18:11:10.046172Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [62:17:2058], tenant Tenant 2026-01-07T18:11:10.046279Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [62:17:2058] 2026-01-07T18:11:10.046311Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:11:10.046562Z node 62 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [62:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-07T18:11:10.046657Z node 62 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [62:19:2060] Successfully bootstrapped, local coordinator id [62:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-07T18:11:10.062682Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-07T18:11:10.062757Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-07T18:11:10.062799Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-07T18:11:10.068984Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [62:18:2059] 
2026-01-07T18:11:10.077209Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [62:14:2056], read group connection_id1, topicPath topic part id 100 query id QueryId cookie 1 2026-01-07T18:11:10.077458Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-07T18:11:10.077929Z node 62 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [62:22:2063] to [62:14:2056] query id QueryId 2026-01-07T18:11:10.078072Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1001: RowDispatcher: Received TEvStopSession from [62:14:2056] topic topic query id QueryId 2026-01-07T18:11:10.078147Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [62:14:2056] query id QueryId, partitions size 1 2026-01-07T18:11:10.078271Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [62:22:2063] 2026-01-07T18:11:10.382050Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [64:17:2058], tenant Tenant 2026-01-07T18:11:10.382156Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [64:17:2058] 2026-01-07T18:11:10.382208Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-07T18:11:10.382477Z node 64 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [64:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-07T18:11:10.382555Z node 64 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [64:19:2060] Successfully bootstrapped, local coordinator id [64:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-07T18:11:10.429366Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-07T18:11:10.429440Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-07T18:11:10.449060Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-07T18:11:10.449455Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [64:18:2059] 2026-01-07T18:11:10.462579Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-07T18:11:10.462786Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-07T18:11:10.462991Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 101 2026-01-07T18:11:10.463392Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-07T18:11:10.463881Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from 
[64:22:2063] to [64:14:2056] query id QueryId 2026-01-07T18:11:10.463959Z node 64 :FQ_ROW_DISPATCHER WARN: row_dispatcher.cpp:1140: RowDispatcher: Fatal session error, remove session [64:22:2063] 2026-01-07T18:11:10.464066Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:14:2056] query id QueryId, partitions size 2 2026-01-07T18:11:10.464353Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.464463Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-07T18:11:10.464572Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.464726Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-07T18:11:10.464861Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-07T18:11:10.465248Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from [64:22:2063] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.465318Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:15:2057] query id QueryId, partitions size 2 2026-01-07T18:11:10.465429Z node 64 :FQ_ROW_DISPATCHER ERROR: row_dispatcher.cpp:1033: RowDispatcher: Wrong readActorId [64:15:2057], no such consumer 2026-01-07T18:11:10.465503Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [64:22:2063] 2026-01-07T18:11:10.465741Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-07T18:11:10.466149Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-07T18:11:10.466245Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:14:2056] part id 100 query id QueryId 2026-01-07T18:11:10.466338Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-07T18:11:10.466429Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.466524Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 100 query id QueryId 2026-01-07T18:11:10.466612Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.466699Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-07T18:11:10.466781Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received 
TEvGetNextBatch from [64:14:2056] part id 101 query id QueryId 2026-01-07T18:11:10.466873Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-07T18:11:10.466959Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-07T18:11:10.467047Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-07T18:11:10.467140Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/ut/unittest |99.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardFilterKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut_service/fast/unittest >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] Test command err: Trying to start YDB, gRPC: 7816, MsgBus: 23274 2026-01-07T18:08:41.979375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683223385413119:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:41.979924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:42.421941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:42.504235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:42.504354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:42.619813Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683223385412921:2081] 1767809321964177 != 1767809321964180 2026-01-07T18:08:42.631106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:42.636534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:42.644852Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:42.982239Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-07T18:08:43.074818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:43.482245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:08:43.482368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:08:43.482376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:08:43.482480Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:44.923200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:45.023073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:45.337930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:45.506402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:45.583607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:46.590089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683244860251282:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:46.590227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:46.591793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683244860251291:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:46.591854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:46.977123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683223385413119:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:46.979959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:08:47.947309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.000988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.046109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.097282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.130127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.189201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.236574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.302814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:48.455667Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683253450186765:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:48.455755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:48.456183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683253450186771:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:48.456502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683253450186770:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:48.456539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:48.464507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:08:48.478563Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592683253450186774:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T18:08:48.561013Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683253450186825:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:08:51.398548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation ... 5]; 2026-01-07T18:11:12.253158Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:11:12.284047Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:11:12.394544Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7592683868230698179:2081] 1767809472251698 != 1767809472251701 2026-01-07T18:11:12.410984Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:11:12.417439Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:11:12.417591Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:11:12.435422Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:11:12.498144Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:11:12.526048Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:11:12.526082Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:11:12.526094Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:11:12.526245Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:11:13.266921Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:11:13.325432Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:11:13.343550Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-07T18:11:13.436763Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:13.679957Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:13.850384Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:17.253305Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7592683868230698207:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:11:17.253405Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:11:18.381310Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592683894000503857:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:18.381500Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:18.382084Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592683894000503867:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:18.382209Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:18.513966Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.577781Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.625223Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.672244Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.720066Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.766284Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.810948Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:18.916324Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:11:19.014222Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592683898295472046:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:19.014355Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:19.014376Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592683898295472051:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:19.014708Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7592683898295472053:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:19.014787Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:11:19.019318Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:11:19.032656Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7592683898295472054:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-07T18:11:19.108562Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7592683898295472116:3795] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:11:21.695738Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-07T18:11:21.703351Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/federated_query/ut_service/fast/unittest >> TCreateAndDropViewTest::DropViewIfExists [GOOD] >> TCreateAndDropViewTest::DropViewInFolder |99.2%| [TM] {RESULT} ydb/core/kqp/federated_query/ut_service/fast/unittest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::TooManyClusters >> TMLPDLQMoverTests::MoveToDLQ_ShortMessage [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_BigMessage >> TDqPqRdReadActorTests::Backpressure [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TTxDataShardLocalKMeansScan::TooManyClusters [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TDqPqRdReadActorTests::TestReadFromTopicFirstWatermark [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqRdReadActorTests::TestWatermarksWhere |99.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> TDqPqRdReadActorTests::TestWatermarksWhere [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap >> TDqPqRdReadActorTests::TestWatermarksWhereFalse [GOOD] >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |99.3%| [TM] 
{BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> TDqPqReadActorTest::TestReadFromTopic >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_BigMessage [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_ManyMessages |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> TDqPqReadActorTest::TestReadFromTopicFromNow |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive >> TDqPqReadActorTest::ReadWithFreeSpace >> 
test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TDqPqReadActorTest::ReadNonExistentTopic >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> 
test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> TMLPDLQMoverTests::MoveToDLQ_ManyMessages [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_TopicNotExists >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> 
TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> 
TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_StickyPages::TestStickyMain 
[GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] Test command err: 00000.001 II| FAKE_ENV: Born at 2026-01-07T18:12:10.296870Z 00000.044 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.052 II| FAKE_ENV: Starting storage for BS group 0 00000.053 II| FAKE_ENV: Starting storage for BS group 1 00000.053 II| FAKE_ENV: Starting storage for BS group 2 00000.053 II| FAKE_ENV: Starting storage for BS group 3 00000.092 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 
actors 00000.094 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.102 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.102 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.102 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.102 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.102 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.102 II| FAKE_ENV: All BS storage groups are stopped 00000.102 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.103 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:10.404865Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 84b annex 0, ~{ } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{2, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} 
queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{3, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{4, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{5, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...compacting 00000.014 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Full 00000.014 DD| 
TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.014 II| TABLET_EXECUTOR: Leader{1:2:8} starting compaction 00000.014 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{1 on 101, Compact{1.2.8, eph 1}} 00000.014 II| TABLET_EXECUTOR: Leader{1:2:9} started compaction 1 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:69632:397:0] left 397b 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:12288:211:0] left 608b 00000.017 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:69632:397:0] result OK flags { Valid } left 211b 00000.017 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:12288:211:0] result OK flags { Valid } left 0b 00000.018 II| OPS_COMPACT: Compact{1.2.8, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (397 0 0)b }, ecr=1.000 00000.019 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 8, product {1 parts epoch 2} done 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.020 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.020 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 8 for step 8 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 3 for step 9 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:10} switch applied on followers, step 9 ...waiting until compacted 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} hope 1 -> done Change{8, redo 72b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...making snapshot with concurrent commit 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxM ... mory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...checking rows (expecting new data and no metadata for old transactions) 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.015 II| TABLET_EXECUTOR: Leader{1:2:7} suiciding, Waste{2:0, 357b +(4, 602b), 6 trc, -602b acc} 00000.018 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.018 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 401 bytes, 401 total, blobs: { [1:2:2:1:8192:209:0], [1:2:5:1:32768:130:0], [1:2:6:1:32768:62:0] } 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 271 bytes, 271 total, blobs: { [1:2:3:1:24576:123:0], [1:2:6:1:24576:148:0] } 00000.019 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.019 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 177b, wait} done, Waste{2:0, 357b +(4, 602b), 6 trc} 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.021 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 177 bytes, 177 total, blobs: { [1:3:1:1:28672:177:0] } 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 209 bytes, 209 total, blobs: { [1:2:2:1:8192:209:0] } 00000.023 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 148 bytes, 148 total, blobs: { [1:2:6:1:24576:148:0] } 00000.023 II| TABLET_EXECUTOR: Leader{1:4:0} activating executor 00000.023 II| TABLET_EXECUTOR: LSnap{1:4, on 4:1, 177b, wait} done, Waste{2:0, 357b +(0, 0b), 1 trc} 00000.023 DD| TABLET_EXECUTOR: Leader{1:4:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} 00000.024 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.024 II| TABLET_EXECUTOR: Leader{1:4:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {736b, 11} 00000.025 II| FAKE_ENV: DS.1 gone, left {534b, 3}, put {1540b, 11} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.025 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 80}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:18.197984Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... 
initializing schema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 64b alter 11b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ... inserting rows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 512b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 ...compacting table 00000.012 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.012 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.012 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.012 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.022 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {1 parts epoch 2} done 00000.022 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 ... 
observed TEvAttach from [63:30:2062] (leader) 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:7} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ... starting follower ... wait first follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.024 II| TABLET_EXECUTOR: LSnap{1:2, on 2:7, 251b, wait} done, Waste{2:0, 770b +(2, 226b), 6 trc} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 2 for step 7 ... observed TEvTablet::TEvFBoot, followerSysActor: [63:51:2087] 00000.025 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.025 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 161 bytes, 161 total, blobs: { [1:2:5:1:12288:161:0] } 00000.026 II| TABLET_EXECUTOR: Follower{1:2:0} activating executor ... observed TEvAttach from [63:58:2091] (follower) ... waiting for NKikimr::NSharedCache::TEvAttach (done) ... stopping leader 00000.026 DD| TABLET_EXECUTOR: Follower{1:2:0} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.026 II| TABLET_EXECUTOR: Leader{1:2:8} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} ... promoting follower ... wait secont follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 251 bytes, 251 total, blobs: { [1:2:7:1:28672:251:0] } 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.029 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.030 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 251b, wait} done, Waste{2:0, 770b +(0, 0b), 1 trc} ... observed TEvAttach from [63:58:2091] (follower) ... 
waiting for NKikimr::NSharedCache::TEvAttach (done) 00000.030 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.031 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} 00000.031 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 27b} miss {0 0b} in-memory miss {0 0b} 00000.031 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.031 II| FAKE_ENV: DS.0 gone, left {119b, 4}, put {673b, 11} 00000.031 II| FAKE_ENV: DS.1 gone, left {1637b, 10}, put {1637b, 10} 00000.031 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: All BS storage groups are stopped 00000.031 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.031 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 57}, stopped |99.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> 
test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.3%| [TA] $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> TieredCache::Touch [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TieredCache::InsertUntouched [GOOD] >> TieredCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::Request_Basics >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Subset |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset [GOOD] >> 
TSharedPageCache_Actor::Request_Superset_Reversed >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TSharedPageCache_Actor::Unregister_Basics >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly >> TMLPDLQMoverTests::MoveToDLQ_TopicNotExists [GOOD] >> TMLPStorageTests::NextFromEmptyStorage [GOOD] >> TMLPStorageTests::CommitToEmptyStorage [GOOD] >> TMLPStorageTests::UnlockToEmptyStorage [GOOD] >> TMLPStorageTests::ChangeDeadlineEmptyStorage [GOOD] >> TMLPStorageTests::AddMessageToEmptyStorage [GOOD] >> TMLPStorageTests::AddNotFirstMessageToEmptyStorage [GOOD] >> TMLPStorageTests::AddMessageWithSkippedMessage [GOOD] >> TMLPStorageTests::AddMessageWithDelay [GOOD] >> TMLPStorageTests::AddMessageWithBigDelay [GOOD] >> TMLPStorageTests::AddMessageWithZeroDelay [GOOD] >> TMLPStorageTests::AddMessageWithDelay_Unlock [GOOD] >> TMLPStorageTests::NextWithoutKeepMessageOrderStorage [GOOD] >> TMLPStorageTests::NextWithKeepMessageOrderStorage [GOOD] >> TMLPStorageTests::NextWithWriteRetentionPeriod [GOOD] >> TMLPStorageTests::NextWithInfinityRetentionPeriod [GOOD] >> TMLPStorageTests::SkipLockedMessage [GOOD] >> TMLPStorageTests::SkipLockedMessageGroups [GOOD] >> TMLPStorageTests::CommitLockedMessage_WithoutKeepMessageOrder [GOOD] >> TMLPStorageTests::CommitLockedMessage_WithKeepMessageOrder [GOOD] >> TMLPStorageTests::CommitUnlockedMessage [GOOD] >> TMLPStorageTests::CommitCommittedMessage [GOOD] >> TMLPStorageTests::UnlockLockedMessage_WithoutKeepMessageOrder [GOOD] >> TMLPStorageTests::UnlockLockedMessage_WithKeepMessageOrder [GOOD] >> TMLPStorageTests::UnlockUnlockedMessage [GOOD] >> TMLPStorageTests::UnlockCommittedMessage [GOOD] >> TMLPStorageTests::ChangeDeadlineLockedMessage [GOOD] >> TMLPStorageTests::ChangeDeadlineUnlockedMessage [GOOD] >> TMLPStorageTests::EmptyStorageSerialization [GOOD] >> TMLPStorageTests::StorageSerialization [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Unlocked [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Locked [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Committed [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_DLQ [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_DeadLetterPolicy_Delete [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_WithHole [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_WithMoveBaseTime_Deadline [GOOD] >> TMLPStorageTests::CompactStorage_ByCommittedOffset [GOOD] >> TMLPStorageTests::CompactStorage_ByRetention [GOOD] >> TMLPStorageTests::CompactStorage_ByDeadline [GOOD] >> TMLPStorageTests::CompactStorage_WithDLQ [GOOD] >> TMLPStorageTests::ProccessDeadlines [GOOD] >> TMLPStorageTests::MoveBaseDeadline >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued >> TMLPStorageTests::MoveBaseDeadline [GOOD] >> TMLPStorageTests::SlowZone_MoveUnprocessedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveLockedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveCommittedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveDLQToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveToSlowZoneAndLock [GOOD] >> 
TMLPStorageTests::SlowZone_MoveToSlowZoneAndCommit [GOOD] >> TMLPStorageTests::SlowZone_MoveToSlowZoneAndDLQ [GOOD] >> TMLPStorageTests::SlowZone_Lock [GOOD] >> TMLPStorageTests::SlowZone_Commit_First [GOOD] >> TMLPStorageTests::SlowZone_Commit [GOOD] >> TMLPStorageTests::SlowZone_DLQ [GOOD] >> TMLPStorageTests::SlowZone_CommitToFast [GOOD] >> TMLPStorageTests::SlowZone_CommitAndAdd [GOOD] >> TMLPStorageTests::SlowZone_Retention_1message [GOOD] >> TMLPStorageTests::SlowZone_Retention_2message [GOOD] >> TMLPStorageTests::SlowZone_Retention_3message [GOOD] >> TMLPStorageTests::ChangeDeadLetterPolicy_Delete [GOOD] >> TMLPStorageTests::ChangeDeadLetterPolicy_Unspecified [GOOD] >> TMLPTests::CreateWithRetentionStorage |99.3%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister |99.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPages >> TSharedPageCache_Actor::InMemory_ReloadPages [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Active >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TSharedPageCache_Transactions::Compaction >> TSharedPageCache_Transactions::Compaction [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum [GOOD] >> 
Vacuum::StartVacuumMultipleFamilies >> Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables [GOOD] >> Vacuum::StartVacuumWithFollowers >> Vacuum::StartVacuumWithFollowers [GOOD] >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable [GOOD] >> Vacuum::StartVacuumWithRestarts >> Vacuum::StartVacuumWithRestarts [GOOD] >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> TVersions::WreckHead >> TExecutorDb::RandomOps >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/sql/py3test |99.3%| [TM] {RESULT} ydb/tests/sql/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 >> TMLPTests::CreateWithRetentionStorage [GOOD] >> TMLPTests::AddWithRetentionStorage >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::FullScan >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] >> TTxDataShardPrefixKMeansScan::BadRequest >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> TExecutorDb::RandomOps [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> BuildStatsHistogram::Ten_Mixed >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout |99.3%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TMLPTests::AddWithRetentionStorage [GOOD] >> TMLPTests::SetRetentionStorage >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial |99.4%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::WreckPart >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TPart::WreckPart [GOOD] >> TPart::WreckPartColumnGroups >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> DBase::Select [GOOD] >> DBase::Defaults [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPartBtreeIndexIteration::NoNodes >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> 
TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOCache::InsertUntouched [GOOD] >> TS3FIFOCache::EnsureLimits [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TScreen::Sequential [GOOD] >> TScreen::Random >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> 
test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::S3FIFO >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck [GOOD] >> Memtable::Erased >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> 
TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TPqWriterTest::TestWriteToTopicMultiBatch >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NOther::Blocks [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + 
BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 
8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, 
+2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> TPqWriterTest::TestDeferredWriteToTopic >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> TPqWriterTest::TestCheckpoints >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TMLPTests::SetRetentionStorage [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] >> 
TSharedPageCache::TryKeepInMemoryMode_Disabling |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 
13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, 
xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 
4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... ch 2} done 00000.148 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.148 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 1}} 00000.148 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.148 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.149 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{5 on 2, Compact{1.2.7, eph 2}} 00000.149 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 5 00000.149 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.149 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.258 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.258 II| OPS_COMPACT: Compact{1.2.7, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.275 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.275 II| OPS_COMPACT: Compact{1.2.6, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.289 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 7, product {1 parts epoch 3} done 00000.291 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 3 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 6, product {1 parts epoch 0} done 00000.291 II| TABLET_EXECUTOR: 
Leader{1:2:11} starting compaction 00000.291 II| TABLET_EXECUTOR: Leader{1:2:12} starting Scan{7 on 2, Compact{1.2.11, eph 3}} 00000.291 II| TABLET_EXECUTOR: Leader{1:2:12} started compaction 7 00000.291 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.309 II| TABLET_EXECUTOR: Leader{1:2:12} starting compaction 00000.309 II| TABLET_EXECUTOR: Leader{1:2:13} starting Scan{9 on 2, Compact{1.2.12, eph 2}} 00000.309 II| TABLET_EXECUTOR: Leader{1:2:13} started compaction 9 00000.310 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} begin on TSubset{head 0, 0m 1p 0c} 00000.310 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.310 II| OPS_COMPACT: Compact{1.2.11, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.311 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00000.311 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{11 on 2, Compact{1.2.13, eph 1}} 00000.311 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 11 00000.311 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.363 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 7 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 11, product {1 parts epoch 4} done 00000.364 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.365 II| OPS_COMPACT: Compact{1.2.12, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.371 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.371 II| OPS_COMPACT: Compact{1.2.13, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.373 II| TABLET_EXECUTOR: Leader{1:2:15} Compact 9 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 12, product {1 parts epoch 0} done 00000.374 II| TABLET_EXECUTOR: Leader{1:2:16} Compact 11 on TGenCompactionParams{2: gen 2 epoch 0, 1 parts} step 13, product {1 parts epoch 0} done 00000.375 II| TABLET_EXECUTOR: Leader{1:2:17} starting compaction 00000.375 II| TABLET_EXECUTOR: Leader{1:2:18} starting Scan{13 on 2, Compact{1.2.17, eph 3}} 00000.375 II| TABLET_EXECUTOR: Leader{1:2:18} started compaction 13 00000.375 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} begin on TSubset{head 0, 0m 1p 0c} 00000.375 II| TABLET_EXECUTOR: Leader{1:2:18} starting compaction 00000.375 II| TABLET_EXECUTOR: Leader{1:2:19} starting Scan{15 on 2, Compact{1.2.18, eph 2}} 00000.375 II| TABLET_EXECUTOR: Leader{1:2:19} started compaction 15 00000.375 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.411 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.411 II| OPS_COMPACT: Compact{1.2.17, eph 3} end=Done, 
5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.454 II| TABLET_EXECUTOR: Leader{1:2:19} Compact 13 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 17, product {1 parts epoch 0} done 00000.455 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=5}, trace 0 of 0 ~3p 00000.455 II| OPS_COMPACT: Compact{1.2.18, eph 2} end=Done, 6 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.470 II| TABLET_EXECUTOR: Leader{1:2:20} Compact 15 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 18, product {1 parts epoch 0} done 00000.472 II| TABLET_EXECUTOR: Leader{1:2:21} got result TEvResult{0 pages [1:2:11:2:12288:92:0] fail RACE}, type 1 00000.472 II| TABLET_EXECUTOR: Leader{1:2:21} starting compaction 00000.472 II| TABLET_EXECUTOR: Leader{1:2:22} starting Scan{17 on 2, Compact{1.2.21, eph 3}} 00000.472 II| TABLET_EXECUTOR: Leader{1:2:22} started compaction 17 00000.472 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} begin on TSubset{head 0, 0m 2p 0c} 00000.521 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 0 of 0 ~3p 00000.521 II| OPS_COMPACT: Compact{1.2.21, eph 3} end=Done, 6 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.522 II| TABLET_EXECUTOR: Leader{1:2:22} Compact 17 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 21, product {1 parts epoch 0} done 00000.523 II| TABLET_EXECUTOR: Leader{1:2:23} suiciding, Waste{2:0, 20001011b +(44, 90121851b), 22 trc, -90121851b acc} 00000.531 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.531 NN| TABLET_SAUSAGECACHE: Poison cache serviced 24 reqs hit {6 20000188b} miss {18 90000505b} in-memory miss {0 0b} 00000.531 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.531 II| FAKE_ENV: DS.0 gone, left {3604b, 22}, put {3624b, 23} 00000.531 II| FAKE_ENV: DS.1 gone, left {122950b, 32}, put {122950b, 32} 00000.535 II| FAKE_ENV: DS.2 gone, left {110001012b, 29}, put {110001012b, 29} 00000.554 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.554 II| FAKE_ENV: All BS storage groups are stopped 00000.554 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.554 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 83}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:13:00.523677Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.006 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.064 II| TABLET_EXECUTOR: Leader{1:2:3} starting compaction 00000.064 II| TABLET_EXECUTOR: Leader{1:2:4} starting Scan{1 on 2, Compact{1.2.3, eph 1}} 00000.064 II| 
TABLET_EXECUTOR: Leader{1:2:4} started compaction 1 00000.064 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.076 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.076 II| OPS_COMPACT: Compact{1.2.3, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (39360 0 0)b }, ecr=0.004 00000.080 II| TABLET_EXECUTOR: Leader{1:2:4} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.135 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.136 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 2}} 00000.136 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.136 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.249 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} end=Done, 2r seen, TFwd{fetch=38.3KiB,saved=38.3KiB,usage=38.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.249 II| OPS_COMPACT: Compact{1.2.6, eph 2} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.250 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 6, product {1 parts epoch 3} done 00000.252 II| TABLET_EXECUTOR: Leader{1:2:9} starting compaction 00000.252 II| TABLET_EXECUTOR: Leader{1:2:10} starting Scan{5 on 2, Compact{1.2.9, eph 3}} 00000.252 II| TABLET_EXECUTOR: Leader{1:2:10} started compaction 5 00000.252 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.379 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 118705b +(4, 118124b), 9 trc, -118124b acc} 00000.379 II| TABLET_EXECUTOR: Leader{1:2:10} cancelling compaction 5 00000.380 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} end=Term, 2r seen, TFwd{fetch=76.8KiB,saved=76.8KiB,usage=76.8KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.380 EE| OPS_COMPACT: Compact{1.2.9, eph 3} end=Term, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.385 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.385 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {4 117917b} miss {0 0b} in-memory miss {0 0b} 00000.385 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.385 II| FAKE_ENV: DS.0 gone, left {843b, 9}, put {863b, 10} 00000.386 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.386 II| FAKE_ENV: DS.1 gone, left {316051b, 13}, put {316051b, 13} 00000.391 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.393 II| FAKE_ENV: All BS storage groups are stopped 00000.393 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.393 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 1 Left 39}, stopped |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction >> TPqWriterTest::TestCheckpoints [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> 
TPqWriterTest::TestCheckpointWithEmptyBatch >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> TMLPTests::SetRetentionStorage [GOOD] Test command err: 2026-01-07T18:08:41.611700Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683220814019883:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:41.611761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:08:41.756476Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:08:42.042507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:08:42.329809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:08:42.330122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:08:42.335653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:08:42.356182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:08:42.377302Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683220814019849:2081] 1767809321602414 != 1767809321602417 2026-01-07T18:08:42.382686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:08:42.641365Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:08:43.254379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00155e/r3tmp/yandexCz9hMo.tmp 2026-01-07T18:08:43.254403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00155e/r3tmp/yandexCz9hMo.tmp 2026-01-07T18:08:43.260059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00155e/r3tmp/yandexCz9hMo.tmp 2026-01-07T18:08:43.260221Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:08:43.954136Z INFO: TTestServer started on Port 24437 GrpcPort 24307 PQClient connected to localhost:24307 2026-01-07T18:08:44.280104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:08:44.313010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-07T18:08:44.411004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:44.653327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-07T18:08:44.679251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-07T18:08:45.644316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683237993890074:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:45.644450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:45.646647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683237993890089:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:45.646742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683237993890090:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:45.646822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:08:45.660550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:08:45.677558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7592683237993890093:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-07T18:08:45.770592Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7592683237993890157:2643] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-07T18:08:46.612447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683220814019883:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:08:46.612684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:08:46.678666Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7592683237993890165:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-07T18:08:46.692425Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDMzODg4YWQtMTFjZDg0Ni1kYmQ4N2VmMC1hNTUzNzI3MQ==, ActorId: [1:7592683237993890072:2330], ActorState: ExecuteState, LegacyTraceId: 01kectb6j3fzs6sryyn4hwmx7f, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-07T18:08:46.703521Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-07T18:08:46.938685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:47.001907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:08:47.127585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7592683250878792363:2837] === CheckClustersList. 
Ok 2026-01-07T18:08:54.266426Z :TODO INFO: TTopicSdkTestSetup started 2026-01-07T18:08:54.325367Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T18:08:54.429203Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7592683276648596359:2942] connected; active server actors: 1 2026-01-07T18:08:54.432508Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-07T18:08:54.434252Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-07T18:08:54.445081Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-07T18:08:54.445261Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-07T18:08:54.473282Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-07T18:08:54.477105Z node 1 :PERSQUEUE DEBUG: pq_impl. ... o.cpp:30: new Cookie src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 generated for partition 0 topic 'test-topic' owner src 2026-01-07T18:13:00.837159Z node 15 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-07T18:13:00.837182Z node 15 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-07T18:13:00.837203Z node 15 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-07T18:13:00.837217Z node 15 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-07T18:13:00.837238Z node 15 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-07T18:13:00.837266Z node 15 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-07T18:13:00.837287Z node 15 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-07T18:13:00.837307Z node 15 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-07T18:13:00.837339Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-07T18:13:00.837384Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T18:13:00.837500Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'test-topic' requestId: 2026-01-07T18:13:00.837518Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2026-01-07T18:13:00.837573Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-07T18:13:00.837662Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 2026-01-07T18:13:00.838206Z :INFO: [/Root] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767809580838 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:00.838305Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0" topic: "test-topic" 2026-01-07T18:13:00.838473Z :INFO: [/Root] MessageGroupId [src] SessionId [src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0] Write session: close. Timeout = 0 ms 2026-01-07T18:13:00.838536Z :INFO: [/Root] MessageGroupId [src] SessionId [src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0] Write session will now close 2026-01-07T18:13:00.838593Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0] Write session: aborting 2026-01-07T18:13:00.839070Z :INFO: [/Root] MessageGroupId [src] SessionId [src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0] Write session: gracefully shut down, all writes complete 2026-01-07T18:13:00.839126Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0] Write session: destroy 2026-01-07T18:13:00.839636Z :TODO INFO: Topic created 2026-01-07T18:13:00.840018Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 grpc read done: success: 0 data: 2026-01-07T18:13:00.840038Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 grpc read failed 2026-01-07T18:13:00.840062Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 grpc closed 2026-01-07T18:13:00.840070Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|c66960d6-efd5f47a-71b78abc-32cf6bc8_0 is DEAD 2026-01-07T18:13:00.852750Z node 15 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-07T18:13:00.870258Z node 15 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T18:13:00.870318Z node 15 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037895] doesn't have tx writes info 2026-01-07T18:13:00.870365Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037896][] pipe [15:7592684333525439164:2980] connected; active server actors: 1 2026-01-07T18:13:00.872895Z node 15 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2026-01-07T18:13:00.872955Z node 15 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037894] doesn't have tx writes info 2026-01-07T18:13:00.877240Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][1][StateInit] bootstrapping 1 [15:7592684333525439251:2463] 2026-01-07T18:13:00.877250Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][0][StateInit] bootstrapping 0 [15:7592684333525439252:2464] 2026-01-07T18:13:00.878332Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 1 [15:7592684333525439252:2464] 2026-01-07T18:13:00.878335Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][1][StateInit] init complete for topic 'topic1' partition 1 generation 1 [15:7592684333525439251:2463] 2026-01-07T18:13:00.878401Z node 15 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][1][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-07T18:13:00.878401Z node 15 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037895][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-07T18:13:00.879177Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][1][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-07T18:13:00.879178Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037895][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-07T18:13:00.879310Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037895][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 0 2026-01-07T18:13:00.879311Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][1][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 0 2026-01-07T18:13:00.880575Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][1][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-07T18:13:00.880575Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037895][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-07T18:13:00.880592Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037895][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T18:13:00.880596Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][1][MLP][mlp-consumer] Initializing new consumer 2026-01-07T18:13:00.880612Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037895][0][MLP][mlp-consumer] Initializing new consumer 2026-01-07T18:13:00.880612Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][1][MLP][mlp-consumer] Initializing new consumer 2026-01-07T18:13:00.880631Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][1][MLP][mlp-consumer] Try commit offset: 0 
vs 0 2026-01-07T18:13:00.880631Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037895][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-07T18:13:00.880647Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037895][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 0 vs 0 2026-01-07T18:13:00.880647Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][1][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 0 vs 0 2026-01-07T18:13:00.880661Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037894][1][MLP][mlp-consumer] Initialized 2026-01-07T18:13:00.880661Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037895][0][MLP][mlp-consumer] Initialized 2026-01-07T18:13:00.880678Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][1][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T18:13:00.880678Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037895][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-07T18:13:00.880700Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][1][MLP][mlp-consumer] Persist 2026-01-07T18:13:00.880700Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037895][0][MLP][mlp-consumer] Persist 2026-01-07T18:13:00.880718Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][1][MLP][mlp-consumer] Batch is empty 2026-01-07T18:13:00.880718Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037895][0][MLP][mlp-consumer] Batch is empty 2026-01-07T18:13:00.881769Z node 15 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037895][Partition][0][StateIdle] Updateing MLP consumer 'mlp-consumer' config 2026-01-07T18:13:00.881774Z node 15 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037894][Partition][1][StateIdle] Updateing MLP consumer 'mlp-consumer' config 2026-01-07T18:13:00.882083Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][1][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 0 2026-01-07T18:13:00.882084Z node 15 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037895][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 0 2026-01-07T18:13:00.882247Z node 15 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T18:13:00.882247Z node 15 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-07T18:13:00.900614Z node 15 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:141: new alter topic request |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |99.4%| [TM] {RESULT} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> 
TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics >> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::InMemory_AttachRepeated >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> 
TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> TSharedPageCache_Actor::InMemory_AttachRepeated [GOOD] >> TSharedPageCache_Actor::GC_Manual >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> TSharedPageCache_Actor::Evict_Passive [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> 
TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2026-01-07T18:09:05.815240Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7592683323083976713:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-07T18:09:05.851878Z node 1 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-07T18:09:05.853036Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2026-01-07T18:09:05.853179Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-07T18:09:05.853203Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7592683323083976713:2054]) 2026-01-07T18:09:05.853242Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7592683323083976719:2048] 2026-01-07T18:09:05.854766Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7592683323083976714:2055] 2026-01-07T18:09:05.854822Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7592683323083976714:2055], partIds: 666 cookie 1 2026-01-07T18:09:05.855188Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7592683323083976714:2055], cookie 1 2026-01-07T18:09:05.855202Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-07T18:09:05.855210Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-07T18:09:05.855248Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7592683323083976716:2057], generation 1 2026-01-07T18:09:05.855289Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvStartSession to [1:7592683323083976716:2057], connection id 1 partitions offsets (666 / ), 2026-01-07T18:09:05.855689Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7592683323083976716:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-07T18:09:05.860444Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7592683323083976716:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:05.899238Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7592683323083976716:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:05.902696Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2026-01-07T18:09:05.902721Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-07T18:09:05.902806Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2026-01-07T18:09:05.902945Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-07T18:09:05.902970Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-07T18:09:05.904434Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:744: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2026-01-07T18:09:05.904537Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1242: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7592683323083976716:2057] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2026-01-07T18:09:05.904552Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:736: SelfId: [1:7592683323083976719:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7592683323083976716:2057] generation 1 2026-01-07T18:09:06.210819Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7592683326985544121:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-07T18:09:06.211283Z node 2 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. 
SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-07T18:09:06.213429Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2026-01-07T18:09:06.213505Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-07T18:09:06.213605Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7592683326985544121:2054]) 2026-01-07T18:09:06.213631Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [2:7592683326985544127:2048] 2026-01-07T18:09:06.213871Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [2:7592683326985544122:2055] 2026-01-07T18:09:06.213975Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [2:7592683326985544122:2055], partIds: 666 cookie 1 2026-01-07T18:09:06.214965Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [2:7592683326985544122:2055], cookie 1 2026-01-07T18:09:06.214987Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-07T18:09:06.214994Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-07T18:09:06.215014Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [2:7592683326985544124:2057], generation 1 2026-01-07T18:09:06.215071Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [2:7592683326985544124:2057], connection id 1 partitions offsets (666 / ), 2026-01-07T18:09:06.215480Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [2:7592683326985544124:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-07T18:09:06.215758Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7592683326985544124:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:06.216584Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvMessageBatch from [2:7592683326985544124:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:06.216611Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2026-01-07T18:09:06.216617Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-07T18:09:06.216885Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2026-01-07T18:09:06.217002Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-07T18:09:06.217017Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-07T18:09:06.223081Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1097: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7592683326985544124:2057], reason Disconnected, cookie 999 2026-01-07T18:09:06.223170Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7592683326985544124:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:06.223808Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7592683326985544124:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-07T18:09:06.223830Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7592683326985544127:2048], TxId: query_1, task: 0, Cluster: . PQ ... ing read session. Close timeout: 0.000000s 2026-01-07T18:13:02.001398Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-07T18:13:02.001463Z :INFO: [local] [local] [14dc07c-11574b65-6ebc8bff-7a31e2fa] Counters: { Errors: 0 CurrentSessionLifetimeMs: 66 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:02.001581Z :NOTICE: [local] [local] [14dc07c-11574b65-6ebc8bff-7a31e2fa] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T18:13:02.003566Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42|2229f05b-e4b1dccf-901c5f95-1d7940b_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-07T18:13:02.003609Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42|2229f05b-e4b1dccf-901c5f95-1d7940b_0] PartitionId [0] Generation [1] Write session will now close 2026-01-07T18:13:02.003657Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42|2229f05b-e4b1dccf-901c5f95-1d7940b_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-07T18:13:02.004043Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42|2229f05b-e4b1dccf-901c5f95-1d7940b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-07T18:13:02.004106Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42|2229f05b-e4b1dccf-901c5f95-1d7940b_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-07T18:13:02.437778Z node 46 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:268: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Load state: { SourceId: "f89315a9-fbf65d4b-75d02ee7-3e9b1a42" ConfirmedSeqNo: 3 EgressBytes: 3 } 2026-01-07T18:13:02.437953Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 2. Checkpoint: 0. Finished: 0 2026-01-07T18:13:02.449335Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 4 2026-01-07T18:13:02.449405Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 5 2026-01-07T18:13:02.464974Z :INFO: [local] OnFederationDiscovery fall back to single mode, database=local [] [] Start federated write session to database '' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "" DbInfos: [ { path: "local" endpoint: "localhost:13805" status: AVAILABLE weight: 100 } ] ControlPlaneEndpoint: localhost:13805 }2026-01-07T18:13:02.465521Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: try to update token 2026-01-07T18:13:02.466068Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Start write session. 
Will connect to nodeId: 0 2026-01-07T18:13:02.466179Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Starting read session 2026-01-07T18:13:02.466254Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Starting single session 2026-01-07T18:13:02.467157Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T18:13:02.467244Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T18:13:02.467303Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Reconnecting session to cluster in 0.000000s 2026-01-07T18:13:02.474697Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Successfully connected. Initializing session 2026-01-07T18:13:02.474725Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: write to message_group: f89315a9-fbf65d4b-75d02ee7-3e9b1a42 2026-01-07T18:13:02.474888Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: send init request: init_request { path: "Checkpoints" producer_id: "f89315a9-fbf65d4b-75d02ee7-3e9b1a42" message_group_id: "f89315a9-fbf65d4b-75d02ee7-3e9b1a42" } 2026-01-07T18:13:02.474928Z :TRACE: [local] TRACE_EVENT InitRequest 2026-01-07T18:13:02.475204Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: OnWriteDone gRpcStatusCode: 0 2026-01-07T18:13:02.477407Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Got InitResponse. ReadSessionId: test_client_1_22_5005219141165973762_v1 2026-01-07T18:13:02.477456Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T18:13:02.477598Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-07T18:13:02.480536Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2026-01-07T18:13:02.482597Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Got ReadResponse, serverBytesSize = 940, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2026-01-07T18:13:02.482733Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2026-01-07T18:13:02.482982Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2026-01-07T18:13:02.483061Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Returning serverBytesSize = 940 to budget 2026-01-07T18:13:02.483131Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] In ContinueReadingDataImpl, ReadSizeBudget = 940, ReadSizeServerDelta = 52427860 2026-01-07T18:13:02.483348Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-07T18:13:02.483418Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T18:13:02.483481Z :DEBUG: [local] Take Data. 
Partition 0. Read: {1, 0} (1-1) 2026-01-07T18:13:02.483533Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2026-01-07T18:13:02.483586Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2026-01-07T18:13:02.483642Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2026-01-07T18:13:02.483769Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2026-01-07T18:13:02.483825Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Returning serverBytesSize = 0 to budget 2026-01-07T18:13:02.483815Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Closing read session. Close timeout: 0.000000s 2026-01-07T18:13:02.483901Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-07T18:13:02.483966Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 17 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:02.484130Z :NOTICE: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T18:13:02.484207Z :DEBUG: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] [] Abort session to cluster 2026-01-07T18:13:02.484721Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Closing read session. Close timeout: 0.000000s 2026-01-07T18:13:02.484788Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-07T18:13:02.484848Z :INFO: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 18 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:02.484979Z :NOTICE: [local] [local] [bde07dcc-6c1de84b-9af370ca-befe25c0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T18:13:02.486996Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: close. Timeout 0.000000s 2026-01-07T18:13:02.487039Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session will now close 2026-01-07T18:13:02.487087Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: aborting 2026-01-07T18:13:02.487490Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: gracefully shut down, all writes complete 2026-01-07T18:13:02.488067Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-07T18:13:02.488138Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767809582488 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:02.488183Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-07T18:13:02.488227Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session is aborting and will not restart 2026-01-07T18:13:02.488289Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f89315a9-fbf65d4b-75d02ee7-3e9b1a42] Write session: destroy 2026-01-07T18:13:03.052024Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2026-01-07T18:13:03.064063Z node 47 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:240: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2026-01-07T18:13:03.064274Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:418: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "36952419-4c4fe888-eebdd057-16ae981a" } |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/pq_async_io/ut/unittest |99.4%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache_Actor::Evict_Passive [GOOD] Test command err: 0.28987 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:56.388676Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.009 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} queued, type NKikimr::NSharedCache::TTxInitSchema 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 500b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.011 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 00000.011 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B 
EvictedInMemoryBytes: 0B 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:8} 
Tx{7, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} commited 
cookie 1 for step 13 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} commited cookie 1 for step 15 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{15, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} commited cookie 1 for step 16 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{16, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:18} commited cookie 1 for step 17 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{17, r ... :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #5 (done) Checking results#5 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.432854Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 6 class Online from cache [ ] already requested [ ] to request [ 14 ] 2026-01-07T18:13:05.432887Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 13 ] owner [33:5:2052] 2026-01-07T18:13:05.432913Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #6 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #6 (done) Checking fetches#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] ... waiting for results #6 2026-01-07T18:13:05.433019Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 14 ] 2026-01-07T18:13:05.433039Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 14 ] cookie 6 2026-01-07T18:13:05.433063Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #6 (done) Checking results#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.433125Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 7 class Online from cache [ ] already requested [ ] to request [ 15 ] 2026-01-07T18:13:05.433156Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 14 ] owner [33:5:2052] 2026-01-07T18:13:05.433188Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #7 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #7 (done) Checking fetches#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] ... waiting for results #7 2026-01-07T18:13:05.433273Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 15 ] 2026-01-07T18:13:05.433290Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 15 ] cookie 7 2026-01-07T18:13:05.433314Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #7 (done) Checking results#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.433394Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 8 class Online from cache [ ] already requested [ ] to request [ 16 ] 2026-01-07T18:13:05.433427Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 15 ] owner [33:5:2052] 2026-01-07T18:13:05.433465Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #8 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #8 (done) Checking fetches#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] ... waiting for results #8 2026-01-07T18:13:05.433558Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 16 ] 2026-01-07T18:13:05.433577Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 16 ] cookie 8 2026-01-07T18:13:05.433607Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #8 (done) Checking results#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.433677Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 9 class Online from cache [ ] already requested [ ] to request [ 17 ] 2026-01-07T18:13:05.433705Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 16 ] owner [33:5:2052] 2026-01-07T18:13:05.433727Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #9 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #9 (done) Checking fetches#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] ... waiting for results #9 2026-01-07T18:13:05.433800Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 17 ] 2026-01-07T18:13:05.433817Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 17 ] cookie 9 2026-01-07T18:13:05.433841Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #9 (done) Checking results#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.433922Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 10 class Online from cache [ ] already requested [ ] to request [ 18 ] 2026-01-07T18:13:05.433953Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 17 ] owner [33:5:2052] 2026-01-07T18:13:05.433982Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #10 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #10 (done) Checking fetches#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for results #10 2026-01-07T18:13:05.434067Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 18 ] 2026-01-07T18:13:05.434085Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 18 ] cookie 10 2026-01-07T18:13:05.434108Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #10 (done) Checking results#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:13:05.434177Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 11 class Online from cache [ ] already requested [ ] to request [ 19 ] 2026-01-07T18:13:05.434214Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 18 ] owner [33:5:2052] 2026-01-07T18:13:05.434237Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #11 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #11 (done) Checking fetches#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] ... waiting for results #11 2026-01-07T18:13:05.434315Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 19 ] 2026-01-07T18:13:05.434332Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 19 ] cookie 11 2026-01-07T18:13:05.434354Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for results #11 (done) Checking results#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] ... waiting for NActors::TEvents::TEvWakeup 2026-01-07T18:13:05.434443Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-07T18:13:05.434476Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) ... waiting for NActors::TEvents::TEvWakeup 2026-01-07T18:13:05.434567Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-07T18:13:05.434612Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 1 2 3 ] owner [33:5:2052] 2026-01-07T18:13:05.434658Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> TTopicReaderTests::TestRun_ReadOneMessage >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBRowLocks::LockSurvivesCompactions [GOOD] >> DBRowLocks::LockOverCompactedErase [GOOD] >> DBRowLocks::CommitTxAfterLockThenCompact [GOOD] >> DBRowLocks::CommitLockThenCompactRowVersion [GOOD] >> DBRowLocks::OverwriteLockThenCompact [GOOD] >> DBRowLocks::LockOpenTxAndTxDataAccounting [GOOD] >> 
DBRowLocks::MultipleCommittedRowLocks [GOOD] >> DBRowLocks::LocksCommittedRemovedIteration [GOOD] >> DBRowLocks::LocksReplay >> DBRowLocks::LocksReplay [GOOD] >> DBRowLocks::LocksMvccCompact [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBRowLocks::LocksMvccCompact [GOOD] Test command err: 10 parts: [0:0:1:0:0:0:0] 23928 rows, 1015 pages, 5 levels: (157741, 52588) (315424, 105149) (479998, 160007) (639733, 213252) (799132, 266385) [0:0:2:0:0:0:0] 24216 rows, 1025 pages, 5 levels: (158968, 52997) (323176, 107733) (478777, 159600) (638131, 212718) (798961, 266328) [0:0:3:0:0:0:0] 23857 rows, 1008 pages, 5 levels: (161719, 53914) (324091, 108038) (482023, 160682) (640741, 213588) (798478, 266167) [0:0:4:0:0:0:0] 24184 rows, 1023 pages, 5 levels: (160366, 53463) (321823, 107282) (478882, 159635) (641413, 213812) (799024, 266349) [0:0:5:0:0:0:0] 23945 rows, 1019 pages, 5 levels: (160006, 53343) (321943, 107322) (483100, 161041) (641107, 213710) (799117, 266380) [0:0:6:0:0:0:0] 23619 rows, 1005 pages, 5 levels: (161371, 53798) (319855, 106626) (480928, 160317) (636934, 212319) (799258, 266427) [0:0:7:0:0:0:0] 23988 rows, 1019 pages, 5 levels: (154531, 51518) (314071, 104698) (475438, 158487) (636523, 212182) (798766, 266263) [0:0:8:0:0:0:0] 23770 rows, 1013 pages, 5 levels: (160948, 53657) (318202, 106075) (477640, 159221) (640657, 213560) (799090, 266371) [0:0:9:0:0:0:0] 24256 rows, 1029 pages, 5 levels: (157747, 52590) (320038, 106687) (482770, 160931) (638905, 212976) (799195, 266406) [0:0:10:0:0:0:0] 24237 rows, 1026 pages, 5 levels: (162409, 54144) (317659, 105894) (477673, 159232) (637528, 212517) (798748, 266257) Checking BTree: Touched 1% bytes, 37 pages RowCountHistogram: 9% (actual 6%) key = (54346, 18123) value = 23288 (actual 16627 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 47414 (actual 46964 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 71513 (actual 62823 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 95735 (actual 93347 - 0% error) 10% (actual 11%) key = (400444, 133489) value = 119910 (actual 120422 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 144061 (actual 138588 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 168190 (actual 169409 - 0% error) 10% (actual 5%) key = (611236, 203753) value = 192378 (actual 183755 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 215613 (actual 212448 - 1% error) 10% (actual 11%) DataSizeHistogram: 9% (actual 6%) key = (54346, 18123) value = 1986792 (actual 1422570 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 4036041 (actual 4004054 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 6085370 (actual 5348583 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 8135866 (actual 7931163 - 1% error) 10% (actual 11%) key = (400444, 133489) value = 10187497 (actual 10227908 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 12238449 (actual 11773611 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 14287401 (actual 14387497 - 0% error) 10% (actual 6%) key = (611236, 203753) value = 16340389 (actual 15610901 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 18309634 (actual 18041898 - 1% error) 10% (actual 11%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79777, 
26600) value = 24001 (actual 24257 - 0% error) 10% (actual 10%) key = (159688, 53237) value = 48010 (actual 48277 - 0% error) 10% (actual 10%) key = (239839, 79954) value = 72013 (actual 72278 - 0% error) 10% (actual 9%) key = (319807, 106610) value = 96022 (actual 96277 - 0% error) 10% (actual 10%) key = (399964, 133329) value = 120041 (actual 120304 - 0% error) 10% (actual 10%) key = (479791, 159938) value = 144061 (actual 144321 - 0% error) 10% (actual 10%) key = (559867, 186630) value = 168077 (actual 168330 - 0% error) 10% (actual 10%) key = (639661, 213228) value = 192085 (actual 192333 - 0% error) 10% (actual 10%) key = (719458, 239827) value = 216091 (actual 216348 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79318, 26447) value = 2038035 (actual 2060169 - 0% error) 10% (actual 10%) key = (159028, 53017) value = 4076033 (actual 4098046 - 0% error) 10% (actual 10%) key = (239581, 79868) value = 6115440 (actual 6137485 - 0% error) 10% (actual 10%) key = (319516, 106513) value = 8153742 (actual 8175567 - 0% error) 10% (actual 10%) key = (399841, 133288) value = 10191957 (actual 10213746 - 0% error) 10% (actual 10%) key = (479734, 159919) value = 12230556 (actual 12252749 - 0% error) 10% (actual 10%) key = (559552, 186525) value = 14269383 (actual 14291350 - 0% error) 10% (actual 10%) key = (639193, 213072) value = 16307737 (actual 16329710 - 0% error) 10% (actual 10%) key = (719326, 239783) value = 18346896 (actual 18369051 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 43 pages RowCountHistogram: 10% (actual 6%) key = (50749, 16924) value = 24065 (actual 15550 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 48098 (actual 44756 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 72300 (actual 61833 - 4% error) 10% (actual 12%) key = (301159, 100394) value = 96516 (actual 90698 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 120685 (actual 119332 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 144842 (actual 136562 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 168942 (actual 165043 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 193089 (actual 183462 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 218665 (actual 210950 - 3% error) 8% (actual 12%) DataSizeHistogram: 10% (actual 6%) key = (50749, 16924) value = 2051869 (actual 1330161 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 4100433 (actual 3812568 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 6148888 (actual 5264750 - 4% error) 10% (actual 11%) key = (301159, 100394) value = 8200933 (actual 7706870 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 10251926 (actual 10135710 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 12302580 (actual 11601475 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 14351377 (actual 14019410 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 16401437 (actual 15584938 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 18568091 (actual 17915901 - 3% error) 8% (actual 12%) 10 parts: [0:0:1:0:0:0:0] 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) [0:0:2:0:0:0:0] 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) [0:0:3:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) [0:0:4:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (256087, 
85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) [0:0:5:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) [0:0:6:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) [0:0:7:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) [0:0:8:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) [0:0:9:0:0:0:0] 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) [0:0:10:0:0:0:0] 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = 
(319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 
496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key 
= (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 
2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 29404, MsgBus: 28268 2026-01-07T18:09:49.343995Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683512551797938:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:49.344051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:09:49.434990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:50.042222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:50.078158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:50.078266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:50.094716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:50.284054Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:50.298791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:50.349701Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:50.807913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:09:51.163766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:09:51.163848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:09:51.163856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:09:51.163965Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:09:52.891012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:54.349045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7592683512551797938:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:54.349158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:09:54.403729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683534026635212:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:54.403853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:54.406306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7592683534026635221:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:09:54.406379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 24266, MsgBus: 31385 2026-01-07T18:09:57.457152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:57.457261Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:57.584054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:57.584671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:57.603013Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7592683546850216499:2081] 1767809397335838 != 1767809397335841 2026-01-07T18:09:57.604645Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:57.628411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:57.765287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:09:57.833709Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:09:57.833726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:09:57.833734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:09:57.833809Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:09:58.420053Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:58.662829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:02.797818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592683568325053876:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:02.797945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:02.799026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7592683568325053886:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:02.799092Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 8554, MsgBus: 28853 2026-01-07T18:10:03.922519Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7592683574996584596:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:10:03.922560Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:10:04.019986Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:10:04.121004Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7592683574996584569:2081] 1767809403917964 != 1767809403917967 2026-01-07T18:10:04.136207Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:10:04.146832Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:10:04.146917Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:10:04.152605Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:10:04.243014Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:10:04.243036Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:10:04.243043Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:10:04.243123Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:10:04.279636Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:10:04.801697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:10:04.970326Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:10:07.799520Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7592683592176454635:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:10:07.799601Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Databa ... qpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/count_episodes_with_titles"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) '((Bool 'true) (Bool 'true)) (lambda '($6) '((PersistableRepr (Member $6 '"series")) (PersistableRepr (Member $6 '"season")))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (DataSource '"kikimr" '"db")) (let $3 '('"season_id" '"series_id")) (let $4 (AggApply 'count_all (StructType) (lambda '($31) (Void)))) (let $5 (Aggregate (ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/episodes"))) (Void) '())) $3) $3 '('('Count0 $4)) '())) (let $6 '('"season_id" '"series_id" '"title")) (let $7 '('Inner '"episodes" '"series" '('"episodes" '"series_id") '('"series" '"series_id") '())) (let $8 '('"episodes" '"series_id" '"episodes" '"season_id")) (let $9 '('"seasons" '"series_id" '"seasons" '"season_id")) (let $10 '('Inner $7 '"seasons" $8 $9 '())) (let $11 '('"rename" '"episodes.Count0" '"episode_count")) (let $12 '('"rename" '"episodes.season_id" '"")) (let $13 '('"rename" '"episodes.series_id" '"")) (let $14 '('"rename" '"seasons.season_id" '"")) (let $15 '('"rename" '"seasons.series_id" '"")) (let $16 '('"rename" '"seasons.title" '"season")) (let $17 '('"rename" '"series.series_id" '"")) (let $18 '('"rename" '"series.title" '"series")) (let $19 '($11 $12 $13 $14 $15 $16 $17 $18)) (let $20 (EquiJoin '($5 '"episodes") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/series"))) (Void) '())) '('"series_id" '"title")) '"series") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/seasons"))) (Void) '())) $6) '"seasons") $10 $19)) (let $21 (Bool 'true)) (let $22 (Sort $20 '($21 $21) (lambda '($32) '((Member $32 '"series") (Member $32 '"season"))))) (let $23 '('"db" '"/Root/episodes" '"Select")) (let $24 '('"db" '"/Root/series" '"Select")) (let $25 '('"db" '"/Root/seasons" '"Select")) (let $26 '($23 $24 $25)) (let $27 '('('"mode" '"flush"))) (let $28 (KiExecDataQuery! 
world $1 (DataQueryBlocks (TKiDataQueryBlock '('($22 '() '0 '0)) (KiEffects) $26 '())) $27 (Void))) (let $29 (DataSink 'result)) (let $30 (ResPull! (Left! $28) $29 (Key) (Nth (Right! $28) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $30 $29) $1 $27)) ) KqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes_with_titles")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes_with_titles" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 24705, MsgBus: 17864 2026-01-07T18:13:04.226448Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7592684350467010611:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:04.226519Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:04.370681Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:04.373448Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:13:04.375049Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7592684350467010583:2081] 1767809584225160 != 1767809584225163 2026-01-07T18:13:04.397833Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:04.397963Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:04.406020Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:04.462882Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-07T18:13:04.462916Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-07T18:13:04.462938Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-07T18:13:04.463078Z node 23 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:13:04.580337Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:13:05.232713Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:13:05.281271Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:13:09.226874Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7592684350467010611:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:09.226996Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-07T18:13:09.389062Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7592684371941847950:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.389205Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.389549Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7592684371941847959:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.389616Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.453156Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7592684371941847981:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.453275Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7592684371941847986:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.453299Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.453574Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7592684371941847989:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.453672Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-07T18:13:09.458515Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-07T18:13:09.471841Z node 23 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7592684371941847988:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-07T18:13:09.547471Z node 23 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [23:7592684371941848041:2555] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.4%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/view/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> 
TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TPart::ForwardEnv [GOOD] >> TPart::ForwardEnvColumnGroups >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 
4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{10} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + 
Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 
Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{21} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 20 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, 
+2)row | ERo ... owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} 
Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{29} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 
Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} 
{Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPart::CutKeys_CutUtf8String [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:51.632117Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.009 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.009 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{4, redo 1041b 
alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{5, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{6, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{7, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{8, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: 
Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{9, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{10, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{11, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{12, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{13, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.014 DD| TABLET_EXECUTOR: 
Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{14, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memor ... {[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{11} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 10 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{3} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 
105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{3} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{3} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{3} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 280b 2r} data 904b + FlatIndex{3} Label{3 rev 3, 401b} 3 rec | Page Row Bytes (String) | 0 0 140b {____________________________________________________________________________________________________cccddd} | 1 1 140b 
{____________________________________________________________________________________________________cd} | 1 1 140b {____________________________________________________________________________________________________cddddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 280 ErasedRowCount: 0} Label{13 rev 1, 204b} | PageId: 0 RowCount: 1 DataSize: 140 ErasedRowCount: 0 | > {____________________________________________________________________________________________________cd} | PageId: 1 RowCount: 2 DataSize: 280 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x89\x91} | 1 1 49b {abc\xF0\x9F\x89\x91\xF0\x9F\x89\x91\xF0\x9F\x89\x91} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x89\x91} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 342b + FlatIndex{3} Label{3 rev 3, 120b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 46b {abc\xE2\x9A\xAB} | 1 1 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xE2\x9A\xAB} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 344b + FlatIndex{3} Label{3 rev 3, 121b} 3 rec | Page Row Bytes (Utf8) | 0 0 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 46 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 332b + FlatIndex{3} Label{3 rev 3, 115b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {abcxxx} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 326b + FlatIndex{3} Label{3 rev 3, 112b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 40b {abcx} | 1 1 40b {abcxxx} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abcx} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x96} | 1 1 49b {abc\xF0\x9F\x98\x96\xF0\x9F\x98\x96\xF0\x9F\x98\x96} + BTreeIndex{PageId: 2 RowCount: 2 
DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x96} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardRecomputeKMeansScan::BadRequest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadMessages_With_Offset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-07T18:13:04.390037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592684349739163341:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:04.390082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:04.449672Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:13:04.452962Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592684350287984313:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:04.453021Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:04.457057Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:13:04.567544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:04.568099Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:04.690458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:04.690612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:04.692830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:04.692894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:04.697382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:04.698475Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T18:13:04.699319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:04.729917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:13:04.734280Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592684349739163289:2082] 1767809584384681 != 1767809584384684 2026-01-07T18:13:04.766296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:04.778541Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T18:13:04.779420Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-07T18:13:04.818743Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:13:05.022396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/00312e/r3tmp/yandexNaVWW7.tmp 2026-01-07T18:13:05.022461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/00312e/r3tmp/yandexNaVWW7.tmp 2026-01-07T18:13:05.023640Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/00312e/r3tmp/yandexNaVWW7.tmp 2026-01-07T18:13:05.023766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:13:05.213144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:13:05.282522Z INFO: TTestServer started on Port 25893 GrpcPort 28894 2026-01-07T18:13:05.395673Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:28894 === TenantModeEnabled() = 0 === Init PQ - start server on port 28894 2026-01-07T18:13:05.455086Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:13:05.582508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T18:13:05.582642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.582769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T18:13:05.582791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T18:13:05.583786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T18:13:05.583847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:13:05.585595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:05.585713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T18:13:05.585880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.585939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T18:13:05.585961Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-07T18:13:05.585972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-07T18:13:05.586713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:13:05.586741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-07T18:13:05.586759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-07T18:13:05.587266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.587297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T18:13:05.587308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-07T18:13:05.588174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.588192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.588206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T18:13:05.588233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-07T18:13:05.590921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T18:13:05.592120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-07T18:13:05.592234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T18:13:05.593517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767809585640, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:05.593664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767809585640 MediatorID: 72057594046382081 TabletID: 
72057594046644480, at schemeshard: 72057594046644480 2026-01-07T18:13:05.593709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T18:13:05.593957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-07T18:13:05.593995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-07T18:13 ... se } Cookie: 18446744073709551615 } 2026-01-07T18:13:24.712251Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2026-01-07T18:13:24.712309Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 sending to client partition status 2026-01-07T18:13:24.712922Z :INFO: [] [] [486dc898-d236bc14-17565276-53c88055] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: (NULL) 2026-01-07T18:13:24.713337Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2026-01-07T18:13:24.713450Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2026-01-07T18:13:24.713492Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2026-01-07T18:13:24.713515Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2026-01-07T18:13:24.713569Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 1767809604598, sizeLag# 280 2026-01-07T18:13:24.713586Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1TEvPartitionReady. 
Aval parts: 1 2026-01-07T18:13:24.713621Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 performing read request: guid# e1d5b589-b8282ef-12575492-cb89ac60, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-07T18:13:24.713701Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid e1d5b589-b8282ef-12575492-cb89ac60 2026-01-07T18:13:24.715227Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1767809604598 CreateTimestampMS: 1767809604596 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1767809604619 CreateTimestampMS: 1767809604597 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1767809604620 CreateTimestampMS: 1767809604597 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551530 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2026-01-07T18:13:24.715326Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 3 2026-01-07T18:13:24.715354Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid e1d5b589-b8282ef-12575492-cb89ac60 has messages 1 2026-01-07T18:13:24.715448Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 read done: guid# e1d5b589-b8282ef-12575492-cb89ac60, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2026-01-07T18:13:24.715473Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 response to read: guid# e1d5b589-b8282ef-12575492-cb89ac60 2026-01-07T18:13:24.715620Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 Process answer. 
Aval parts: 0 2026-01-07T18:13:24.715831Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2026-01-07T18:13:24.715938Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2026-01-07T18:13:24.716136Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2026-01-07T18:13:24.716174Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] Returning serverBytesSize = 490 to budget 2026-01-07T18:13:24.716202Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2026-01-07T18:13:24.716437Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-07T18:13:24.716597Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T18:13:24.716638Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-07T18:13:24.716661Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-07T18:13:24.716655Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2026-01-07T18:13:24.716712Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2026-01-07T18:13:24.716754Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] Returning serverBytesSize = 0 to budget 2026-01-07T18:13:24.716745Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 got read request: guid# 29580b-cca055b9-cfb49b01-339fb0e5 2026-01-07T18:13:24.716849Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] Requesting status for partition stream id: 1 2026-01-07T18:13:24.717095Z :INFO: [] [] [486dc898-d236bc14-17565276-53c88055] Closing read session. Close timeout: 0.000000s 2026-01-07T18:13:24.717130Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2026-01-07T18:13:24.717169Z :INFO: [] [] [486dc898-d236bc14-17565276-53c88055] Counters: { Errors: 0 CurrentSessionLifetimeMs: 17 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:24.717174Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2026-01-07T18:13:24.717261Z :NOTICE: [] [] [486dc898-d236bc14-17565276-53c88055] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T18:13:24.717258Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 sending to client partition status 2026-01-07T18:13:24.717301Z :DEBUG: [] [] [486dc898-d236bc14-17565276-53c88055] [] Abort session to cluster 2026-01-07T18:13:24.717428Z :NOTICE: [] [] [486dc898-d236bc14-17565276-53c88055] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T18:13:24.718923Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 grpc read done: success# 0, data# { } 2026-01-07T18:13:24.718939Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 grpc read failed 2026-01-07T18:13:24.719042Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 closed 2026-01-07T18:13:24.719239Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_10397069637765413103_v1 is DEAD 2026-01-07T18:13:24.719566Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [3:7592684438594587579:2541] disconnected. 2026-01-07T18:13:24.719589Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [3:7592684438594587579:2541] disconnected; active server actors: 1 2026-01-07T18:13:24.719600Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [3:7592684438594587579:2541] client user disconnected session shared/user_3_1_10397069637765413103_v1 2026-01-07T18:13:24.719740Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_10397069637765413103_v1 2026-01-07T18:13:24.918318Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [3:7592684438594587602:2539] TxId: 281474976720687. Ctx: { TraceId: 01kectkq2ze2cta9qtfaqyh2nf, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NDk4MTIwYjAtYTYxMzNhZDYtOGNiZjEwYTgtOWRkOWFiYTE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-07T18:13:24.919037Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7592684438594587609:2539], TxId: 281474976720687, task: 3. Ctx: { CheckpointId : . TraceId : 01kectkq2ze2cta9qtfaqyh2nf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NDk4MTIwYjAtYTYxMzNhZDYtOGNiZjEwYTgtOWRkOWFiYTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7592684438594587602:2539], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TTxDataShardRecomputeKMeansScan::BadRequest [GOOD] >> TTxDataShardRecomputeKMeansScan::MainTable >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> TTxDataShardRecomputeKMeansScan::MainTable [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> test_public_api.py::TestExplain::test_explain_data_query >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: 
00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:31.363543Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.010 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.121 II| TABLET_EXECUTOR: LSnap{1:2, on 2:301, 5607b, wait} done, Waste{2:0, 573641b +(0, 0b), 300 trc} 00000.129 II| TABLET_EXECUTOR: Leader{1:2:320} starting compaction 00000.129 II| TABLET_EXECUTOR: Leader{1:2:321} starting Scan{1 on 2, Compact{1.2.320, eph 1}} 00000.129 II| TABLET_EXECUTOR: Leader{1:2:321} started compaction 1 00000.129 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.320, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.131 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.320, eph 1}} end=Done, 94r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 16 of 19 ~1p 00000.131 II| OPS_COMPACT: Compact{1.2.320, eph 1} end=Done, 7 blobs 73r (max 94), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 16 +3, (155261 52924 183173)b }, ecr=1.000 00000.144 II| TABLET_EXECUTOR: Leader{1:2:321} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 320, product {1 parts epoch 2} done 00000.175 II| TABLET_EXECUTOR: Leader{1:2:369} starting compaction 00000.175 II| TABLET_EXECUTOR: Leader{1:2:370} starting Scan{3 on 3, Compact{1.2.369, eph 1}} 00000.175 II| TABLET_EXECUTOR: Leader{1:2:370} started compaction 3 00000.175 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.369, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.177 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.369, eph 1}} end=Done, 108r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 13 of 17 ~1p 00000.177 II| OPS_COMPACT: Compact{1.2.369, eph 1} end=Done, 12 blobs 77r (max 108), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 13 +8, (125543 6989 175031)b }, ecr=1.000 00000.189 II| TABLET_EXECUTOR: Leader{1:2:370} Compact 3 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 369, product {1 parts epoch 2} done 00000.311 II| TABLET_EXECUTOR: LSnap{1:2, on 2:601, 8552b, wait} done, Waste{2:0, 1447386b +(156, 83837b), 300 trc} 00000.341 II| TABLET_EXECUTOR: Leader{1:2:645} starting compaction 00000.341 II| TABLET_EXECUTOR: Leader{1:2:646} starting Scan{5 on 2, Compact{1.2.645, eph 2}} 00000.341 II| TABLET_EXECUTOR: Leader{1:2:646} started compaction 5 00000.341 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.645, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.344 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.645, eph 2}} end=Done, 113r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 16 of 18 ~1p 00000.344 II| OPS_COMPACT: Compact{1.2.645, eph 2} end=Done, 12 blobs 113r (max 113), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 16 +8, (163390 20448 210987)b }, ecr=1.000 00000.362 II| TABLET_EXECUTOR: Leader{1:2:647} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 645, product {1 parts epoch 3} done 
00000.363 II| TABLET_EXECUTOR: Leader{1:2:649} starting compaction 00000.364 II| TABLET_EXECUTOR: Leader{1:2:650} starting Scan{7 on 2, Compact{1.2.649, eph 2}} 00000.364 II| TABLET_EXECUTOR: Leader{1:2:650} started compaction 7 00000.364 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.649, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.370 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.649, eph 2}} end=Done, 158r seen, TFwd{fetch=310KiB,saved=310KiB,usage=297KiB,after=13KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 36 of 43 ~3p 00000.370 II| OPS_COMPACT: Compact{1.2.649, eph 2} end=Done, 4 blobs 126r (max 186), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 36 +0, (260160 60069 326058)b }, ecr=1.000 00000.372 II| TABLET_EXECUTOR: Leader{1:2:651} Compact 7 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 649, product {1 parts epoch 0} done 00000.444 II| TABLET_EXECUTOR: Leader{1:2:732} starting compaction 00000.444 II| TABLET_EXECUTOR: Leader{1:2:733} starting Scan{9 on 3, Compact{1.2.732, eph 2}} 00000.444 II| TABLET_EXECUTOR: Leader{1:2:733} started compaction 9 00000.444 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.732, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.447 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.732, eph 2}} end=Done, 105r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 19 of 20 ~1p 00000.448 II| OPS_COMPACT: Compact{1.2.732, eph 2} end=Done, 12 blobs 105r (max 105), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 19 +8, (104734 13591 247472)b }, ecr=1.000 00000.462 II| TABLET_EXECUTOR: Leader{1:2:734} Compact 9 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 732, product {1 parts epoch 3} done 00000.463 II| TABLET_EXECUTOR: Leader{1:2:735} starting compaction 00000.463 II| TABLET_EXECUTOR: Leader{1:2:736} starting Scan{11 on 3, Compact{1.2.735, eph 2}} 00000.463 II| TABLET_EXECUTOR: Leader{1:2:736} started compaction 11 00000.463 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.735, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.469 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.735, eph 2}} end=Done, 156r seen, TFwd{fetch=223KiB,saved=223KiB,usage=223KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 42 of 48 ~3p 00000.469 II| OPS_COMPACT: Compact{1.2.735, eph 2} end=Done, 4 blobs 136r (max 182), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 42 +0, (197388 20580 373272)b }, ecr=1.000 00000.471 II| TABLET_EXECUTOR: Leader{1:2:738} Compact 11 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 735, product {1 parts epoch 0} done 00000.629 II| TABLET_EXECUTOR: LSnap{1:2, on 2:901, 10222b, wait} done, Waste{2:0, 2222666b +(171, 716199b), 300 trc} 00000.709 II| TABLET_EXECUTOR: Leader{1:2:1017} starting compaction 00000.709 II| TABLET_EXECUTOR: Leader{1:2:1018} starting Scan{13 on 2, Compact{1.2.1017, eph 3}} 00000.709 II| TABLET_EXECUTOR: Leader{1:2:1018} started compaction 13 00000.709 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.1017, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.712 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.1017, eph 3}} end=Done, 100r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 12 of 18 ~1p 00000.712 II| OPS_COMPACT: Compact{1.2.1017, eph 3} end=Done, 10 blobs 100r (max 100), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 12 +6, (160895 47263 167562)b }, 
ecr=1.000 00000.725 II| TABLET_EXECUTOR: Leader{1:2:1019} Compact 13 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1017, product {1 parts epoch 4} done 00000.766 II| TABLET_EXECUTOR: Leader{1:2:1080} starting compaction 00000.766 II| TABLET_EXECUTOR: Leader{1:2:1081} starting Scan{15 on 3, Compact{1.2.1080, eph 3}} 00000.766 II| TABLET_EXECUTOR: Leader{1:2:1081} started compaction 15 00000.766 II| TABLET_OPS_HOST: Scan{15 on 3, Compact{1.2.1080, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.769 II| TABLET_OPS_HOST: Scan{15 on 3, Compact{1.2.1080, eph 3}} end=Done, 92r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 14 of 19 ~1p 00000.769 II| OPS_COMPACT: Compact{1.2.1080, eph 3} end=Done, 9 blobs 92r (max 92), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 14 +5, (119305 6685 176187)b }, ecr=1.000 00000.771 II| TABLET_EXECUTOR: Leader{1:2:1081} Compact 15 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1080, product {1 parts epoch 4} done 00000.861 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1201, 11371b, wait} done, Waste{2:0, 3085542b +(154, 118845b), 300 trc} 00000.976 II| TABLET_EXECUTOR: Leader{1:2:1398} starting compaction 00000.976 II| TABLET_EXECUTOR: Leader{1:2:1399} starting Scan{17 on 3, Compact{1.2.1398, eph 4}} 00000.976 II| TABLET_EXECUTOR: Leader{1:2:1399} started compaction 17 00000.976 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1398, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00000.977 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1398, eph 4}} end=Done, 105r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 18 of 20 ~1p 00000.978 II| OPS_COMPACT: Compact{1.2.1398, eph 4} end=Done, 7 blobs 105r (max 105), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 18 +3, (132858 6618 191716)b }, ecr=1.000 00000.988 II| TABLET_EXECUTOR: Leader{1:2:1399} Compact 17 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1398, product {1 parts epoch 5} done 00000.990 II| TABLET_EXECUTOR: Leader{1:2:1401} starting compaction 00000.990 II| TABLET_EXECUTOR: Leader{1:2:1402} starting Scan{19 on 3, Compact{1.2.1401, eph 4}} 00000.990 II| TABLET_EXECUTOR: Leader{1:2:1402} started compaction 19 00000.990 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1401, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00000.993 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1401, eph 4}} end=Done, 160r seen, TFwd{fetch=245KiB,saved=245KiB,usage=245KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 31 of 40 ~3p 00000.994 II| OPS_COMPACT: Compact{1.2.1401, eph 4} end=Done, 4 blobs 160r (max 197), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 31 +0, (191198 13303 286822)b }, ecr=1.000 00000.995 II| TABLET_EXECUTOR: Leader{1:2:1404} Compact 19 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 1401, product {1 parts epoch 0} done 00001.011 II| TABLET_EXECUTOR: Leader{1:2:1408} starting compaction 00001.011 II| TABLET_EXECUTOR: Leader{1:2:1409} starting Scan{21 on 2, Compact{1.2.1408, eph 4}} 00001.011 II| TABLET_EXECUTOR: Leader{1:2:1409} started compaction 21 00001.011 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1408, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00001.013 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1408, eph 4}} end=Done, 102r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 12 of 14 ~1p 
00001.014 II| OPS_COMPACT: Compact{1.2.1408, eph 4} end=Done, 8 blobs 102r (max 102), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 12 +4, (152443 6760 143332)b }, ecr=1.000 00001.026 II| TABLET_EXECUTOR: Leader{1:2:1410} Compact 21 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1408, product {1 parts epoch 5} done 00001.026 II| TABLET_EXECUTOR: Leader{1:2:1411} starting compaction 00001.027 II| TABLET_EXECUTOR: Leader{1:2:1412} starting Scan{23 on 2, Compact{1.2.1411, eph 4}} 00001.027 II| TABLET_EXECUTOR: Leader{1:2:1412} started compaction 23 00001.027 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1411, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00001.031 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1411, eph 4}} end=Done, 166r seen, TFwd{fetch=298KiB,saved=298KiB,usage=292KiB,after=6.69KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 27 of 34 ~3p 00001.031 II| OPS_COMPACT: Compact{1.2.1411, eph 4} end=Done, 4 blobs 166r (max 202), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 27 +0, (266755 40320 243797)b }, ecr=1.000 00001.033 II| TABLET_EXECUTOR: Leader{1:2:1412} Compact 23 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 1411, product {1 parts epoch 0} done 00001.035 II| TABLET_EXECUTOR: Leader{1:2:1413} got result TEvResult{0 pages [1:2:1 ... DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [59:30:2062]) to queue queue_compaction_gen0 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.017 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.017 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.018 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.018 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 5 00000.036 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {tx status + 1 parts epoch 2} done 00000.036 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.036 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.036 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.037 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [59:30:2062]) (release resources {1, 0}) 00000.037 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting until compacted 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 
4194304b of static mem, Memory{4194304 dyn 0} 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 1 ...compacting 00000.038 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Mem 00000.039 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (2 by [59:30:2062]) priority=5 resources={1, 0} 00000.039 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.039 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [59:30:2062]) from queue queue_compaction_gen0 00000.039 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.039 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.039 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.039 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.039 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.039 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.039 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.040 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 7, product {0 parts epoch 2} done 00000.040 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.040 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.040 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [59:30:2062]) (release resources {1, 0}) 00000.040 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting until compacted 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 0 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} release 4194304b of static, Memory{0 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.043 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 478b +(3, 191b), 9 trc, -191b acc} 00000.047 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.047 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 258 bytes, 258 total, blobs: { [1:2:2:1:8192:84:0], [1:2:6:1:32768:124:0], [1:2:8:1:32768:50:0] } 00000.048 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 384 bytes, 384 total, blobs: { [1:2:5:1:12288:158:0], [1:2:3:1:24576:78:0], [1:2:4:1:24576:65:0], [1:2:9:1:24576:83:0] } 00000.049 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.049 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.049 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.049 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.050 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 239b, wait} done, Waste{2:0, 478b 
+(3, 191b), 9 trc} 00000.050 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ... checking rows 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} pin 0 (0 b) load 1 (55 b) 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} postponed, loading 1 pages, 55 bytes, newly pinned 0 pages, 0 bytes 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:158:0] ok OK}, type 1 00000.051 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} activated 00000.052 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.052 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.052 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.052 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 478b +(0, 0b), 1 trc, -191b acc} 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 91b} miss {0 0b} in-memory miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {798b, 12} 00000.053 II| FAKE_ENV: DS.1 gone, left {717b, 5}, put {1117b, 11} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.053 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 126}, stopped |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-07T18:13:05.365611Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592684354759939770:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:05.365655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:05.385863Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : 
Created 2026-01-07T18:13:05.386278Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592684356195152608:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:05.386480Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:05.390885Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:13:05.512004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:05.523341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:05.557235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:05.557314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:05.559369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:05.559403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:05.588156Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T18:13:05.589302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:05.589582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:05.602696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:13:05.655788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/002f49/r3tmp/yandexO6cTbg.tmp 2026-01-07T18:13:05.655807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/002f49/r3tmp/yandexO6cTbg.tmp 2026-01-07T18:13:05.655948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ec9d/002f49/r3tmp/yandexO6cTbg.tmp 2026-01-07T18:13:05.656044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:13:05.682503Z INFO: TTestServer started on Port 23548 GrpcPort 7732 2026-01-07T18:13:05.720782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-07T18:13:05.774814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to 
localhost:7732 === TenantModeEnabled() = 0 === Init PQ - start server on port 7732 2026-01-07T18:13:05.924778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T18:13:05.924939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.925118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T18:13:05.925154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T18:13:05.925321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T18:13:05.925354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:13:05.927095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:05.927188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T18:13:05.927309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.927337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T18:13:05.927348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-07T18:13:05.927366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-07T18:13:05.928089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T18:13:05.928103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-07T18:13:05.928117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T18:13:05.928551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.928578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T18:13:05.928590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-07T18:13:05.929624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.929642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:05.929684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:05.929698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-07T18:13:05.940808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T18:13:05.942489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-07T18:13:05.942569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T18:13:05.943978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767809585990, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:05.944110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767809585990 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T18:13:05.944135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:05.944396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-07T18:13:05.944441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:05.944608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T18:13:05.944651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T18:13:05.945774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T18:13:05.945796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T18:13:05.945942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046 ... ed/user session shared/user_5_1_17140568270293813882_v1 Handle describe topics response 2026-01-07T18:13:40.411855Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 auth is DEAD 2026-01-07T18:13:40.411933Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 auth ok: topics# 1, initDone# 0 2026-01-07T18:13:40.413694Z :INFO: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] [] Got InitResponse. ReadSessionId: shared/user_5_1_17140568270293813882_v1 2026-01-07T18:13:40.413736Z :DEBUG: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-07T18:13:40.413901Z :DEBUG: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-07T18:13:40.414114Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 register session: topic# rt3.dc1--topic1 2026-01-07T18:13:40.414584Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2026-01-07T18:13:40.414664Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 got read request: guid# e9ea18ac-7f988393-9dd85c1e-efd78822 2026-01-07T18:13:40.418891Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684504406419379:2579] connected; active server actors: 1 2026-01-07T18:13:40.418992Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037898][rt3.dc1--topic1] consumer "user" register session for pipe [5:7592684504406419379:2579] session shared/user_5_1_17140568270293813882_v1 2026-01-07T18:13:40.419048Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037898][rt3.dc1--topic1] consumer user register readable partition 0 2026-01-07T18:13:40.419107Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037898][rt3.dc1--topic1] consumer user family created family=1 (Status=Free, Partitions=[0]) 2026-01-07T18:13:40.419156Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037898][rt3.dc1--topic1] consumer user register reading session ReadingSession "shared/user_5_1_17140568270293813882_v1" 
(Sender=[5:7592684504406419376:2579], Pipe=[5:7592684504406419379:2579], Partitions=[], ActiveFamilyCount=0) 2026-01-07T18:13:40.419186Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037898][rt3.dc1--topic1] consumer user rebalancing was scheduled 2026-01-07T18:13:40.419239Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037898][rt3.dc1--topic1] consumer user balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-07T18:13:40.419302Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037898][rt3.dc1--topic1] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_5_1_17140568270293813882_v1" (Sender=[5:7592684504406419376:2579], Pipe=[5:7592684504406419379:2579], Partitions=[], ActiveFamilyCount=0) 2026-01-07T18:13:40.419366Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037898][rt3.dc1--topic1] consumer user family 1 status Active partitions [0] session "shared/user_5_1_17140568270293813882_v1" sender [5:7592684504406419376:2579] lock partition 0 for ReadingSession "shared/user_5_1_17140568270293813882_v1" (Sender=[5:7592684504406419376:2579], Pipe=[5:7592684504406419379:2579], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-07T18:13:40.419430Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037898][rt3.dc1--topic1] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-07T18:13:40.419460Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037898][rt3.dc1--topic1] consumer user balancing duration: 0.000196s 2026-01-07T18:13:40.424604Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/user_5_1_17140568270293813882_v1" ClientId: "user" PipeClient { RawX1: 7592684504406419379 RawX2: 4503621102209555 } Path: "/Root/PQ/rt3.dc1--topic1" } 2026-01-07T18:13:40.424776Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2026-01-07T18:13:40.425438Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7592684504406419381:2582] 2026-01-07T18:13:40.426563Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_17140568270293813882_v1:1 with generation 1 2026-01-07T18:13:40.434859Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 6 WriteTimestampMS: 1767809620173 CreateTimestampMS: 1767809620171 SizeLag: 830 WriteTimestampEstimateMS: 1767809620425 ClientHasAnyCommits: false } Cookie: 
18446744073709551615 } 2026-01-07T18:13:40.434911Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 2026-01-07T18:13:40.434981Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 sending to client partition status 2026-01-07T18:13:40.437535Z :INFO: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 10 2026-01-07T18:13:40.438137Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 10 } } 2026-01-07T18:13:40.438183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-07T18:13:40.438263Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 10, commitOffset# (empty maybe) 2026-01-07T18:13:40.438285Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 48 shard idx 72057594046644480:4 data size 656 row count 2 2026-01-07T18:13:40.438321Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 48], pathId map=Versions, is column=0, is olap=0, RowCount 2, DataSize 656 2026-01-07T18:13:40.438326Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 10 2026-01-07T18:13:40.438373Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 10 2026-01-07T18:13:40.438401Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037891: SplitByLoadNotEnabledForTable 2026-01-07T18:13:40.438638Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-07T18:13:40.505077Z :INFO: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] Closing read session. 
Close timeout: 0.000000s 2026-01-07T18:13:40.505157Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2026-01-07T18:13:40.505218Z :INFO: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 111 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:40.505336Z :NOTICE: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T18:13:40.505393Z :DEBUG: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] [] Abort session to cluster 2026-01-07T18:13:40.505917Z :NOTICE: [] [] [b8760ca3-b4686008-2ea5fef8-ce7ceeaf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T18:13:40.508756Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 grpc read done: success# 0, data# { } 2026-01-07T18:13:40.508786Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 grpc read failed 2026-01-07T18:13:40.508816Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 grpc closed 2026-01-07T18:13:40.508865Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_17140568270293813882_v1 is DEAD 2026-01-07T18:13:40.509678Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_17140568270293813882_v1 2026-01-07T18:13:40.510863Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684504406419379:2579] disconnected. 2026-01-07T18:13:40.510907Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684504406419379:2579] disconnected; active server actors: 1 2026-01-07T18:13:40.510934Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684504406419379:2579] client user disconnected session shared/user_5_1_17140568270293813882_v1 |99.5%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] |99.5%| [TA] $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-07T18:13:08.236288Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592684369466971789:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:08.236402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:08.262742Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:13:08.262951Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7592684368418260110:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:13:08.263548Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:13:08.268293Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-07T18:13:08.388328Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:08.398984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:13:08.419740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:08.419828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:08.421461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:13:08.421582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:13:08.445559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:08.446898Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-07T18:13:08.448994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:13:08.463903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:13:08.516549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ec9d/002f41/r3tmp/yandexhORo4N.tmp 2026-01-07T18:13:08.516576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ec9d/002f41/r3tmp/yandexhORo4N.tmp 2026-01-07T18:13:08.516768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully 
initialized from file: /home/runner/.ya/build/build_root/ec9d/002f41/r3tmp/yandexhORo4N.tmp 2026-01-07T18:13:08.516886Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-07T18:13:08.542305Z INFO: TTestServer started on Port 15036 GrpcPort 18348 2026-01-07T18:13:08.642751Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:18348 === TenantModeEnabled() = 0 === Init PQ - start server on port 18348 2026-01-07T18:13:08.748237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-07T18:13:08.748370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:08.748528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-07T18:13:08.748553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-07T18:13:08.748700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-07T18:13:08.748736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:13:08.750478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:08.750609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-07T18:13:08.750728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:08.750760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-07T18:13:08.750781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-07T18:13:08.750801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-07T18:13:08.751557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T18:13:08.751575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-07T18:13:08.751587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-07T18:13:08.752040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:08.752068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-07T18:13:08.752078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-07T18:13:08.753193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:08.753233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-07T18:13:08.753248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:08.753283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-07T18:13:08.756147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-07T18:13:08.757430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-07T18:13:08.757508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-07T18:13:08.758709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767809588804, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-07T18:13:08.758807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767809588804 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-07T18:13:08.758837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:08.758997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: 
Change state for txid 281474976715657:0 128 -> 240 2026-01-07T18:13:08.759028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-07T18:13:08.759141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-07T18:13:08.759175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-07T18:13:08.760233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-07T18:13:08.760248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-07T18:13:08.760423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-07T18:13:08.760445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7592684369466972416:2394], at schemeshard: 72057594046644480, txId: 281474976715657, path id: ... bletId: 72075186224037897 Generation: 1, pipe: [5:7592684519347677134:2580] 2026-01-07T18:13:43.972119Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_10562016644659185711_v1:1 with generation 1 2026-01-07T18:13:43.974743Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 WriteTimestampMS: 1767809623858 CreateTimestampMS: 1767809623856 SizeLag: 280 WriteTimestampEstimateMS: 1767809623959 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-07T18:13:43.974783Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2026-01-07T18:13:43.974850Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 sending to client partition status 2026-01-07T18:13:43.975454Z :INFO: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2026-01-07T18:13:43.975778Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2026-01-07T18:13:43.975878Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2026-01-07T18:13:43.975933Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2026-01-07T18:13:43.975964Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2026-01-07T18:13:43.976052Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1767809623858, sizeLag# 280 2026-01-07T18:13:43.976075Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1TEvPartitionReady. Aval parts: 1 2026-01-07T18:13:43.976119Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 performing read request: guid# 3fafd099-ddc5c746-9d078a73-1ede87ec, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-07T18:13:43.976181Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 3fafd099-ddc5c746-9d078a73-1ede87ec 2026-01-07T18:13:43.977509Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1767809623858 CreateTimestampMS: 1767809623856 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1767809623894 CreateTimestampMS: 1767809623857 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1767809623895 CreateTimestampMS: 1767809623857 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 4 WriteTimestampMS: 1767809623895 CreateTimestampMS: 1767809623857 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551408 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-07T18:13:43.977685Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-07T18:13:43.977731Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 3fafd099-ddc5c746-9d078a73-1ede87ec has messages 1 2026-01-07T18:13:43.977808Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 read done: guid# 3fafd099-ddc5c746-9d078a73-1ede87ec, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 531 2026-01-07T18:13:43.977845Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 response to read: guid# 3fafd099-ddc5c746-9d078a73-1ede87ec 2026-01-07T18:13:43.978011Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 Process answer. Aval parts: 0 2026-01-07T18:13:43.978312Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Got ReadResponse, serverBytesSize = 531, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428269 2026-01-07T18:13:43.978482Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428269 2026-01-07T18:13:43.978694Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2026-01-07T18:13:43.978737Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Returning serverBytesSize = 531 to budget 2026-01-07T18:13:43.978762Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] In ContinueReadingDataImpl, ReadSizeBudget = 531, ReadSizeServerDelta = 52428269 2026-01-07T18:13:43.978987Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-07T18:13:43.979113Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-07T18:13:43.979180Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-07T18:13:43.979224Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-07T18:13:43.979206Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 grpc read done: success# 1, data# { read_request { bytes_size: 531 } } 2026-01-07T18:13:43.979251Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-07T18:13:43.979315Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] The application data is transferred to the client. 
Number of messages 4, size 32 bytes 2026-01-07T18:13:43.979334Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 got read request: guid# 57ba057b-294baa6f-bc8d6b6a-ff11e9ea 2026-01-07T18:13:43.979364Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Returning serverBytesSize = 0 to budget 2026-01-07T18:13:43.979502Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Requesting status for partition stream id: 1 2026-01-07T18:13:43.979816Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2026-01-07T18:13:43.979967Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 sending to client partition status 2026-01-07T18:13:44.079653Z :INFO: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] Closing read session. Close timeout: 0.000000s 2026-01-07T18:13:44.079730Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2026-01-07T18:13:44.079786Z :INFO: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 118 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-07T18:13:44.079896Z :NOTICE: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-07T18:13:44.079942Z :DEBUG: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] [] Abort session to cluster 2026-01-07T18:13:44.080500Z :NOTICE: [] [] [813f893a-e7f94cb2-2d48bcc7-980cf66b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-07T18:13:44.081070Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 grpc read done: success# 0, data# { } 2026-01-07T18:13:44.081111Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 grpc read failed 2026-01-07T18:13:44.081163Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 closed 2026-01-07T18:13:44.081734Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_10562016644659185711_v1 is DEAD 2026-01-07T18:13:44.081963Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_10562016644659185711_v1 2026-01-07T18:13:44.082254Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684519347677132:2577] disconnected. 2026-01-07T18:13:44.082426Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684519347677132:2577] disconnected; active server actors: 1 2026-01-07T18:13:44.082456Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7592684519347677132:2577] client user disconnected session shared/user_5_1_10562016644659185711_v1 |99.5%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.5%| [TA] {RESULT} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.5%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::EmptyCluster |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_vdisks.py::TestTinyVDisks::test_enabled >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardRecomputeKMeansScan::EmptyCluster [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> Backup::GenerationDirs [GOOD] >> Backup::SnapshotIOError >> Backup::SnapshotIOError [GOOD] >> Backup::EmptyData >> Backup::EmptyData [GOOD] >> Backup::SnapshotData [GOOD] >> Backup::SnapshotLargeData |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> 
test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/restarts/py3test |99.5%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success 
[GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> TVersions::Wreck0Reverse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 2026-01-07T18:12:25.445705Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:12:25.448275Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-07T18:12:25.448318Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [1:5:2052] 2026-01-07T18:12:25.448385Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [1:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-07T18:12:25.448468Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #1 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #1 (done) Checking fetches#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] ... waiting for results #1 2026-01-07T18:12:25.449128Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 3 ] 2026-01-07T18:12:25.449197Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [1:5:2052] class Online pages [ 1 2 3 ] cookie 1 2026-01-07T18:12:25.449243Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 366B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #1 (done) Checking results#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] 2026-01-07T18:12:25.502221Z node 2 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:12:25.502466Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-07T18:12:25.502490Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:5:2052] 2026-01-07T18:12:25.502563Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-07T18:12:25.502617Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-07T18:12:25.502698Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-07T18:12:25.502719Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:5:2052] 2026-01-07T18:12:25.502754Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:5:2052] cookie 2 class Online from cache [ ] already requested [ ] to request [ 4 5 ] 2026-01-07T18:12:25.502789Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 610B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:12:25.502838Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:6:2053] 2026-01-07T18:12:25.502868Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:6:2053] cookie 3 class Online from cache [ ] already requested [ ] to request [ 5 6 ] 2026-01-07T18:12:25.502904Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 854B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:12:25.502970Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:6:2053] 2026-01-07T18:12:25.503005Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:6:2053] cookie 4 class Online from cache [ ] already requested [ ] to request [ 6 7 ] 2026-01-07T18:12:25.503038Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 1.07KiB EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #4 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] ... waiting for results #4 2026-01-07T18:12:25.503335Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status ERROR pages [ 1 2 3 ] 2026-01-07T18:12:25.503361Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1076: Drop page collection [1:0:256:0:0:0:1] error ERROR 2026-01-07T18:12:25.503385Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:5:2052] class Online error ERROR cookie 1 2026-01-07T18:12:25.503413Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:6:2053] class Online error ERROR cookie 3 2026-01-07T18:12:25.503451Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 732B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] 2026-01-07T18:12:25.503589Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 6 ] 2026-01-07T18:12:25.503614Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 488B EvictedInMemoryBytes: 0B Checking results#4 Expected: Actual: ... waiting for results #4 2026-01-07T18:12:25.513952Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 6 7 ] 2026-01-07T18:12:25.514005Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:6:2053] class Online pages [ 6 7 ] cookie 4 2026-01-07T18:12:25.514044Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B 2026-01-07T18:12:25.514093Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 4 5 ] 2026-01-07T18:12:25.514119Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:5:2052] class Online pages [ 4 5 ] cookie 2 2026-01-07T18:12:25.514143Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 488B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] 2026-01-07T18:12:25.565857Z node 3 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-07T18:12:25.566121Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-07T18:12:25.566161Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [3:5:2052] 2026-01-07T18:12:25.566217Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [3:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2026-01-07T18:12:25.566242Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] 2026-01-07T18:12:25.566313Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-07T18:12:25.566414Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-07T18:12:25.566449Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [3:6:2053] 2026-01-07T18:12:25.566515Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [3:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-07T18:12:25.566543Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) Checking fetches#2 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] ... waiting for fetches #2 2026-01-07T18:12:25.566674Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2026-01-07T18:12:25.566721Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 3 4 ] 2026-01-07T18:12:25.566770Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedI ... 
00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.025 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.026 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.026 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.026 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.026 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.026 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.026 II| FAKE_ENV: All BS storage groups are stopped 00000.026 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.026 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.501676Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.060 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.061 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.062 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} in-memory miss {0 0b} 00000.062 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.062 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.062 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.062 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.062 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.062 II| FAKE_ENV: All BS storage groups are stopped 00000.062 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.062 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.569643Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.038 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.040 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} in-memory miss {0 0b} 00000.040 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.040 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1826b, 23} 00000.040 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.040 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.040 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.040 II| FAKE_ENV: All BS storage groups are stopped 00000.040 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.040 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.616266Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 
262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.034 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.035 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} in-memory miss {0 0b} 00000.035 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.035 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.036 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.036 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: All BS storage groups are stopped 00000.036 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.036 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.658089Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.031 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.032 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} in-memory miss {0 0b} 00000.033 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.033 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1766b, 27} 00000.033 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.033 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: All BS storage groups are stopped 00000.033 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.033 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.696517Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.013 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.013 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.013 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.013 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.013 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.013 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.013 II| FAKE_ENV: All BS storage groups are stopped 00000.013 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.013 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.716029Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting 
storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.051 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.052 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} in-memory miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.053 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.053 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.775654Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.022 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.023 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.023 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.023 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.023 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.023 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.024 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.024 II| FAKE_ENV: All BS storage groups are stopped 00000.024 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.024 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.810812Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR cookie 0 00000.048 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR ... waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult ... 
waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult (done) 00000.059 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.060 NN| TABLET_SAUSAGECACHE: Poison cache serviced 5 reqs hit {8 205278b} miss {0 0b} in-memory miss {0 0b} 00000.060 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.060 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1353b, 17} 00000.060 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {105547b, 14} 00000.060 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {102560b, 2} 00000.060 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.061 II| FAKE_ENV: All BS storage groups are stopped 00000.061 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.061 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:12:29.896623Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.239 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled 00000.250 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.252 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.252 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.252 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {501b, 9} 00000.252 II| FAKE_ENV: DS.1 gone, left {425b, 4}, put {460b, 5} 00000.252 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.252 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.252 II| FAKE_ENV: All BS storage groups are stopped 00000.252 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 17.71s 00000.252 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardSampleKScan::BadRequest >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> Backup::SnapshotLargeData [GOOD] >> Backup::SnapshotSchema >> Backup::SnapshotSchema [GOOD] >> Backup::ChangelogData >> Backup::ChangelogData [GOOD] >> Backup::ChangelogLargeData >> TTxDataShardSampleKScan::BadRequest [GOOD] >> TTxDataShardSampleKScan::RunScan >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_public_api.py::TestSessionNotFound::test_session_not_found >> TTxDataShardSampleKScan::RunScan [GOOD] >> 
TTxDataShardSampleKScan::SkipForeign >> TTxDataShardSampleKScan::SkipForeign [GOOD] >> TTxDataShardValidateUniqueIndexScan::BadRequest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TTxDataShardValidateUniqueIndexScan::BadRequest [GOOD] >> TTxDataShardValidateUniqueIndexScan::RunScan >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/build_index/ut/unittest >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] Test command err: 2026-01-07T18:09:17.587547Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7592683377625386246:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-07T18:09:17.587626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-07T18:09:17.686884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:18.172638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:18.172716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:18.174766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:18.199278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-07T18:09:18.305853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7592683377625386107:2081] 1767809357562956 != 1767809357562959 2026-01-07T18:09:18.359252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:18.394420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:18.403350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-07T18:09:18.467805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:18.557572Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7592683381920354215:2283] 2026-01-07T18:09:18.557898Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:18.593765Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:18.597818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:18.597920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:18.601046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:18.601089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:18.601130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:18.604365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:18.604477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:18.604516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7592683381920354239:2283] in generation 1 2026-01-07T18:09:18.606691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:18.669315Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:09:18.673094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:18.673183Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7592683381920354242:2285] 2026-01-07T18:09:18.673194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:18.673203Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:09:18.673216Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:18.677303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592683381920354213:2498], serverId# [1:7592683381920354217:2499], sessionId# [0:0:0] 2026-01-07T18:09:18.677434Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:09:18.677540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:18.677577Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:18.677600Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:18.677682Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:09:18.677705Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:18.677731Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:09:18.678126Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-07T18:09:18.693045Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-07T18:09:18.701139Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09:18.701828Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-07T18:09:18.701911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-07T18:09:18.712111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7592683381920354255:2517], serverId# [1:7592683381920354256:2518], sessionId# [0:0:0] 2026-01-07T18:09:18.717202Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767809358756 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767809358756 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-07T18:09:18.717245Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:18.717440Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09:18.717544Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:18.717567Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-07T18:09:18.717626Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767809358756:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-07T18:09:18.717948Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767809358756:281474976710657 keys extracted: 0 2026-01-07T18:09:18.718121Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-07T18:09:18.718239Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:18.718329Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 
tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-07T18:09:18.737239Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-07T18:09:18.741593Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:18.744112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767809358755 2026-01-07T18:09:18.744135Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:18.744162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1767809358770 2026-01-07T18:09:18.744833Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767809358756} 2026-01-07T18:09:18.744881Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:18.744917Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-07T18:09:18.744955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:18.745003Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-07T18:09:18.745145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767809358756 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7592683377625386473:2149], exec latency: 23 ms, propose latency: 26 ms 2026-01-07T18:09:18.745185Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-07T18:09:18.745222Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:18.755194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-07T18:09:18.755251Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72 ... 
_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T18:15:22.794285Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T18:15:22.794337Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-07T18:15:22.794439Z node 50 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-07T18:15:22.794552Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037889 at tablet 72075186224037889 send result to client [50:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-07T18:15:22.794628Z node 50 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-01-07T18:15:22.794812Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:15:22.797666Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-07T18:15:22.797828Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:15:22.799085Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 3000 2026-01-07T18:15:22.799750Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2026-01-07T18:15:22.799854Z node 50 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-07T18:15:22.810049Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1172:2967], serverId# [50:1173:2968], sessionId# [0:0:0] 2026-01-07T18:15:22.810310Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 45 2026-01-07T18:15:22.810427Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:22.811180Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:22.811295Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:22.811420Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:22.811634Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 45 MeteringStats { ReadRows: 0 ReadBytes: 0 } 2026-01-07T18:15:22.812560Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 
2026-01-07T18:15:22.812641Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:15:22.812702Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T18:15:22.812785Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T18:15:23.037078Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1193:2982], serverId# [50:1194:2983], sessionId# [0:0:0] 2026-01-07T18:15:23.037699Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-07T18:15:23.037899Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=2 2026-01-07T18:15:23.049313Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:15:23.056587Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1200:2988], serverId# [50:1201:2989], sessionId# [0:0:0] 2026-01-07T18:15:23.056969Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 46 2026-01-07T18:15:23.057124Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.057772Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.057892Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.058407Z node 50 :BUILD_INDEX ERROR: unique_index.cpp:126: Failed TValidateUniqueIndexScan Id: 1 Status: BUILD_ERROR Issues: {
: Error: Duplicate key found: (key_part1=1, key_part2=1) } Id: 1 TabletId: 72075186224037889 Status: BUILD_ERROR Issues { message: "Duplicate key found: (key_part1=1, key_part2=1)" severity: 1 } RequestSeqNoGeneration: 42 RequestSeqNoRound: 46 MeteringStats { ReadRows: 2 ReadBytes: 42 } 2026-01-07T18:15:23.059013Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T18:15:23.059096Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:15:23.059184Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T18:15:23.059278Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T18:15:23.334317Z node 50 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715664, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 43] 2026-01-07T18:15:23.341518Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037889 2026-01-07T18:15:23.341786Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037889, row count=2 2026-01-07T18:15:23.353557Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:15:23.518641Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037889 2026-01-07T18:15:23.518817Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:5] at 72075186224037889, row count=2 2026-01-07T18:15:23.530192Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:15:23.537183Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1247:3020], serverId# [50:1248:3021], sessionId# [0:0:0] 2026-01-07T18:15:23.537595Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 47 2026-01-07T18:15:23.537755Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.538742Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.538869Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.539082Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.539353Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 47 MeteringStats { ReadRows: 2 ReadBytes: 37 } 2026-01-07T18:15:23.539869Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T18:15:23.539958Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:15:23.540068Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T18:15:23.540163Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-07T18:15:23.676598Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037889 2026-01-07T18:15:23.676805Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037889, row count=3 2026-01-07T18:15:23.688238Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-07T18:15:23.695179Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1272:3038], serverId# [50:1273:3039], sessionId# [0:0:0] 2026-01-07T18:15:23.695573Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 48 2026-01-07T18:15:23.695700Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.696472Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.696579Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.696779Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-07T18:15:23.697020Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 48 MeteringStats { ReadRows: 5 ReadBytes: 45 } 2026-01-07T18:15:23.697739Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-07T18:15:23.697828Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:15:23.697919Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-07T18:15:23.698018Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/build_index/ut/unittest |99.5%| [TM] {RESULT} ydb/core/tx/datashard/build_index/ut/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> Backup::ChangelogLargeData [GOOD] >> Backup::ChangelogManyCommits |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> Backup::ChangelogManyCommits [GOOD] >> Backup::ChangelogSchema >> Backup::ChangelogSchema 
[GOOD] >> Backup::ChangelogSchemaAndData >> Backup::ChangelogSchemaAndData [GOOD] >> Backup::ChangelogSchemaNewColumn >> Backup::ChangelogSchemaNewColumn [GOOD] >> Backup::ChangelogTornWrite [GOOD] >> Backup::ExcludeTablet >> Backup::ExcludeTablet [GOOD] >> Backup::RecoveryModeKeepsData [GOOD] >> Backup::NoBackupTable >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> Backup::NoBackupTable [GOOD] >> Backup::NoBackupColumn >> Backup::NoBackupColumn [GOOD] >> Backup::NewSnapshotChangelogSize >> Backup::NewSnapshotChangelogSize [GOOD] >> Bloom::Conf [GOOD] >> Bloom::Hashes [GOOD] >> Bloom::Rater >> Bloom::Rater [GOOD] >> Bloom::Dipping >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestBadSession::test_simple >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark 
>> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> 
test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestBadSession::test_simple [GOOD] >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Many_Serial [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> 
BuildStatsHistogram::Many_Serial [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:14:05.790333Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.012 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.014 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...restarting tablet 00000.019 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.020 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g3_s2/changelog.json ...restarting tablet again 00000.023 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g4_s2/snapshot 00000.023 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZ1kYU4/dummy/1/backup_19700101000000Z_g4_s2/changelog.json 00000.032 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.033 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.033 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.033 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {186b, 6} 00000.033 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.033 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: All BS storage groups are stopped 00000.033 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.033 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:14:05.830292Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZoy51j/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.010 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZoy51j/dummy/1/backup_19700101000000Z_g2_s2/changelog.json 00000.079 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception yexception: ydb/core/tablet_flat/flat_executor.cpp:5177: Backup snapshot failed: Failed to create snapshot dir /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZoy51j/dummy/1/backup_19700101000000Z_g2_s2/snapshot: (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpZoy51j/dummy/1 ??+0 (0x1241B9FD) __cxa_throw+221 (0x1241B81D) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&)+1049 (0x188B8A29) 
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1443 (0x1883DAC3) NActors::IActor::Receive(TAutoPtr&)+744 (0x13B4A338) ??+0 (0x1241B9FD) __cxa_rethrow_primary_exception+340 (0x1241BC44) std::rethrow_exception(std::exception_ptr)+28 (0x1245D9BC) NActors::IActorExceptionHandler::OnUnhandledException(std::exception_ptr const&)+183 (0x11279D37) ...waiting tablet death 00000.080 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.080 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.080 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.080 II| FAKE_ENV: DS.0 gone, left {62b, 2}, put {62b, 2} 00000.080 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.080 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.080 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.080 II| FAKE_ENV: All BS storage groups are stopped 00000.080 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.080 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 17}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:14:05.913973Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.006 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.009 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.015 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.019 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.020 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.020 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.056 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.060 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpwCqbua/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.060 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.061 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.064 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.064 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.065 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.065 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {440b, 14} 00000.065 II| FAKE_ENV: DS.1 gone, left {395b, 2}, put {860b, 6} 00000.065 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.065 II| FAKE_ENV: DS.3 gone, left {0b, 
0}, put {0b, 0} 00000.065 II| FAKE_ENV: All BS storage groups are stopped 00000.065 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.065 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 26}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:14:05.993305Z ...starting tablet 00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.006 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.009 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns 00000.010 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.011 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns simultaneously 00000.011 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...erasing row 00000.012 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.012 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...replacing row 00000.012 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing different values in one column 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing composite primary key 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.019 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.022 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.023 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.023 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.057 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.061 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpzAjYf5/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.061 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.061 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.069 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.070 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.070 II| FAKE_ENV: Shut order, 
stopping 4 BS groups 00000.070 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {1268b, 26} 00000.070 II| FAKE_ENV: DS.1 gone, left {668b, 4}, put {2057b, 18} 00000.070 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.070 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.070 II| FAKE_ENV: All BS storage groups are stopped 00000.070 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.070 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 36}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-07T18:14:06.068312Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.006 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpF8JHBL/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/ec9d/002541/r3tmp/tmpF8JHBL/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.009 DD| LOCAL_DB_BACKUP: Handle NKikimr::NT ... 140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) 
(317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) [0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 
rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 
(actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.6%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> DataShardStats::HasSchemaChanges_Families [GOOD] >> DataShardStats::BackupTableStatsReportInterval |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> DataShardStats::BackupTableStatsReportInterval [GOOD] >> DataShardStats::CollectKeySampleLeader >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> 
test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> DataShardStats::CollectKeySampleLeader [GOOD] >> DataShardStats::CollectKeySampleFollower >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> DataShardStats::CollectKeySampleFollower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::CollectKeySampleFollower [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-07T18:09:27.123238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-07T18:09:27.392221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-07T18:09:27.471826Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-07T18:09:27.472541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-07T18:09:27.472603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-07T18:09:28.179513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-07T18:09:28.179648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-07T18:09:28.313274Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767809363404263 != 1767809363404267 2026-01-07T18:09:28.345118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-07T18:09:28.405240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-07T18:09:28.538489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-07T18:09:29.015268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-07T18:09:29.040896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-07T18:09:29.211106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-07T18:09:29.298285Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-07T18:09:29.299484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-07T18:09:29.299873Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-07T18:09:29.300109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-07T18:09:29.422879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-07T18:09:29.423644Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-07T18:09:29.423790Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-07T18:09:29.430329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-07T18:09:29.430442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-07T18:09:29.430508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-07T18:09:29.437503Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-07T18:09:29.437811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-07T18:09:29.437930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-07T18:09:29.449577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-07T18:09:29.490750Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-07T18:09:29.493664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-07T18:09:29.493878Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-07T18:09:29.493945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-07T18:09:29.493990Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-07T18:09:29.494028Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-07T18:09:29.494284Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:29.495453Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-07T18:09:29.496911Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-07T18:09:29.497049Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-07T18:09:29.497139Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-07T18:09:29.497181Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:09:29.497234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T18:09:29.497293Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T18:09:29.497333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T18:09:29.497365Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-07T18:09:29.497418Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-07T18:09:29.497594Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2769], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:29.497644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:09:29.497689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:892:2769], sessionId# [0:0:0] 2026-01-07T18:09:29.498117Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2769] 2026-01-07T18:09:29.498162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-07T18:09:29.498288Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-07T18:09:29.499055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-07T18:09:29.499120Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-07T18:09:29.499218Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-07T18:09:29.499269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-07T18:09:29.499311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-07T18:09:29.499349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-07T18:09:29.499386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-07T18:09:29.499695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-07T18:09:29.499730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-07T18:09:29.499766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-07T18:09:29.499806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-07T18:09:29.499880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-07T18:09:29.499910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-07T18:09:29.499947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-07T18:09:29.499994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-07T18:09:29.500037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-07T18:09:29.502664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-07T18:09:29.502734Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-07T18:09:29.513629Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 46644480 FollowerId: 1 2026-01-07T18:17:54.007752Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.018208Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.018286Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.750843Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.751105Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 38 2026-01-07T18:17:54.754038Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-07T18:17:54.754080Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-07T18:17:54.754461Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-07T18:17:54.754755Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-07T18:17:54.755197Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [16:877:2759], Recipient [16:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-07T18:17:54.765475Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.765527Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:54.765647Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 38 2026-01-07T18:17:55.545409Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 
2026-01-07T18:17:55.545479Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-07T18:17:55.545571Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 76000 last cleanup 0 2026-01-07T18:17:55.545629Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-07T18:17:55.545670Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-07T18:17:55.545728Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-07T18:17:55.545793Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-07T18:17:55.545923Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:55.556661Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:55.556739Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-07T18:17:56.516453Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [16:1626:3417], Recipient [16:924:2786]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 KeysSize: 3 2026-01-07T18:17:56.516546Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-07T18:17:56.516711Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-07T18:17:56.516778Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-07T18:17:56.516862Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-07T18:17:56.516998Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T18:17:56.517051Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-07T18:17:56.517104Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-07T18:17:56.517151Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-07T18:17:56.517183Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-07T18:17:56.517238Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T18:17:56.517261Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 
72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-07T18:17:56.517279Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-07T18:17:56.517296Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-07T18:17:56.517415Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T18:17:56.517690Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Restart 2026-01-07T18:17:56.517728Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037888 2026-01-07T18:17:56.517980Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-07T18:17:56.518009Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-07T18:17:56.518075Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-07T18:17:56.518297Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[16:1626:3417], 0} after executionsCount# 2 2026-01-07T18:17:56.518382Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[16:1626:3417], 0} sends rowCount# 3, bytes# 48, quota rows left# 998, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-07T18:17:56.518472Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[16:1626:3417], 0} finished in read 2026-01-07T18:17:56.518542Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T18:17:56.518563Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-07T18:17:56.518584Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-07T18:17:56.518605Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-07T18:17:56.518642Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-07T18:17:56.518660Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-07T18:17:56.518685Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-07T18:17:56.518725Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2026-01-07T18:17:56.518836Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-07T18:17:56.519649Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [16:1626:3417], Recipient [16:924:2786]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-07T18:17:56.519693Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-07T18:17:56.519762Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 42 } }, { items { uint32_value: 44 } }, { items { uint32_value: 46 } } TEST 9: EvGetTableStats(collectKeySample=false) after the collected key sample becomes invalid TEST Sending the EvGetTableStats message to the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 2026-01-07T18:17:56.521221Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [16:1628:3420], Recipient [16:924:2786]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-07T18:17:56.521289Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-07T18:17:56.521357Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037888, clientId# [16:1627:3419], serverId# [16:1628:3420], sessionId# [0:0:0] 2026-01-07T18:17:56.521469Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553160, Sender [16:829:2724], Recipient [16:924:2786]: NKikimrTxDataShard.TEvGetTableStats TableId: 38 CollectKeySample: false TEST Received the TEvGetTableStatsResult response from the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 DatashardId: 72075186224037888 TableLocalId: 38 TableStats { InMemSize: 0 LastAccessTime: 80518 LastUpdateTime: 0 } TabletMetrics { CPU: 102 } FullStatsReady: false TableOwnerId: 72057594046644480 FollowerId: 1 >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_stats/unittest |99.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_vdisks.py::TestTinyVDisks::test_enabled_disabled >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> 
test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_self_heal.py::TestEnableSelfHeal::test_replication |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDocApiTables::test_create_table |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> 
test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test 
|99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] 2026-01-07 18:20:02,525 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 18:20:02,939 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 811860 831M 828M 754M ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/ec9d/001a0b/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 909342 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 910618 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 910672 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 911183 1.6G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = 
hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 200, in test_turn_on_serverless_storage_billing time.sleep(15) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ec9d/001a0b', '--source-root', '/home/runner/.ya/build/build_root/ec9d/001a0b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 
'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ec9d/001a0b', '--source-root', '/home/runner/.ya/build/build_root/ec9d/001a0b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ec9d/001a0b/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] >> test_kafka_streams.py::TestYdbTopicWorkload::test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] >> test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> ConsistentIndexRead::InteractiveTx >> test_workload.py::TestYdbMixedWorkload::test[row] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_encryption.py::TestEncryption::test_simple_encryption |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_workload.py::TestYdbWorkload::test[row-local] >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_value_workload.py::TestResultSetValueWorkload::test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/viewer/tests/py3test |99.8%| [TM] {RESULT} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.8%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload_topic.py::TestYdbTopicWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/table/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/table/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/table/tests/py3test >> test_workload.py::TestYdbWorkload::test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] >> test_workload.py::TestYdbTestShardWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_workload.py::TestYdbWorkload::test[row-local] [GOOD] >> KqpQueryService::ReplyPartLimitProxyNode >> Transfer_RowTable::KeyColumnFirst >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] >> test_workload.py::TestYdbMixedWorkload::test[row] [GOOD] >> Transfer_RowTable::KeyColumnFirst [GOOD] >> Transfer_RowTable::KeyColumnLast >> Transfer::BaseScenario_Local >> test_workload.py::TestYdbWorkload::test[row-remote] >> Transfer_RowTable::KeyColumnLast [GOOD] >> Transfer_RowTable::ComplexKey |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/encryption/py3test |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kafka/tests/py3test >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kafka/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/kafka/tests/py3test >> Transfer_RowTable::ComplexKey [GOOD] >> Transfer_RowTable::NullableColumn >> Transfer::BaseScenario_Local [GOOD] >> Transfer::BaseScenario_Remote >> test_workload.py::TestYdbWorkload::test >> Transfer_RowTable::NullableColumn [GOOD] >> Transfer_RowTable::WriteNullToKeyColumn >> Transfer_RowTable::WriteNullToKeyColumn [GOOD] >> Transfer_RowTable::WriteNullToColumn >> Transfer::BaseScenario_Remote [GOOD] >> Transfer::CreateTransfer_TargetNotFound >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Transfer::ConnectionString_BadChar >> Transfer_RowTable::WriteNullToColumn [GOOD] >> Transfer_RowTable::Upsert_DifferentBatch >> Transfer::ConnectionString_BadChar [GOOD] >> Transfer::ConnectionString_BadDNSName >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] >> Transfer::ConnectionString_BadDNSName [GOOD] >> Transfer::Create_WithPermission >> Transfer::Create_WithPermission [GOOD] >> Transfer::Create_WithoutTablePermission >> Transfer_RowTable::Upsert_DifferentBatch [GOOD] >> Transfer_RowTable::Upsert_OneBatch >> Transfer::Create_WithoutTablePermission [GOOD] >> Transfer::Create_WithoutAlterTopicPermission_AndGrant >> Transfer_ColumnTable::KeyColumnFirst >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> test_workload.py::TestYdbMixedWorkload::test[column] >> Transfer_RowTable::Upsert_OneBatch [GOOD] >> Transfer_RowTable::ColumnType_Bool >> NodeIdDescribe::HasDistribution [GOOD] >> Transfer_RowTable::ColumnType_Bool [GOOD] >> Transfer_RowTable::ColumnType_Date |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_svc/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest >> Transfer::Create_WithoutAlterTopicPermission_AndGrant [GOOD] >> Transfer::LocalTopic_WithPermission >> 
test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> Transfer_ColumnTable::KeyColumnFirst [GOOD] >> Transfer_ColumnTable::KeyColumnLast >> Transfer_RowTable::ColumnType_Date [GOOD] >> Transfer_RowTable::ColumnType_Double >> Transfer::LocalTopic_WithPermission [GOOD] >> Transfer::LocalTopic_BigMessage |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/topic/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test >> Transfer_RowTable::ColumnType_Double [GOOD] >> Transfer_RowTable::ColumnType_Int8 >> Transfer::LocalTopic_BigMessage [GOOD] >> Transfer::AlterLambda >> Transfer_ColumnTable::KeyColumnLast [GOOD] >> Transfer_ColumnTable::ComplexKey >> test_workload.py::TestYdbWorkload::test >> Transfer_RowTable::ColumnType_Int8 [GOOD] >> Transfer_RowTable::ColumnType_Int16 >> Transfer_RowTable::ColumnType_Int16 [GOOD] >> Transfer_RowTable::ColumnType_Int32 >> Transfer_ColumnTable::ComplexKey [GOOD] >> Transfer_ColumnTable::NullableColumn >> Transfer::AlterLambda [GOOD] >> Transfer::EnsureError >> Transfer_RowTable::ColumnType_Int32 [GOOD] >> Transfer_RowTable::ColumnType_Int64 |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> Transfer::EnsureError [GOOD] >> Transfer::CheckCommittedOffset_Local >> Transfer_RowTable::ColumnType_Int64 [GOOD] >> Transfer_RowTable::ColumnType_Utf8_LongValue >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> Transfer::CheckCommittedOffset_Local [GOOD] >> Transfer::CheckCommittedOffset_Remote >> Transfer_ColumnTable::NullableColumn [GOOD] >> Transfer_ColumnTable::WriteNullToKeyColumn >> Transfer_RowTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_RowTable::ColumnType_Uuid >> Transfer::CheckCommittedOffset_Remote [GOOD] >> Transfer::DropTransfer >> Transfer_ColumnTable::WriteNullToKeyColumn [GOOD] >> Transfer_ColumnTable::WriteNullToColumn >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::ColumnType_Uuid [GOOD] >> Transfer_RowTable::MessageField_Attributes |99.9%| [TM] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> Transfer::DropTransfer [GOOD] >> Transfer::CreateAndDropConsumer |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> Transfer::CreateAndDropConsumer [GOOD] >> Transfer::DescribeError_OnLambdaCompilation >> test_workload.py::TestYdbKvWorkload::test[column] >> Transfer_RowTable::MessageField_Attributes [GOOD] >> Transfer_RowTable::MessageField_CreateTimestamp >> Transfer_ColumnTable::WriteNullToColumn [GOOD] >> Transfer_ColumnTable::Upsert_DifferentBatch >> Transfer::DescribeError_OnLambdaCompilation [GOOD] >> Transfer::PausedAfterError >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] >> Transfer_RowTable::MessageField_CreateTimestamp [GOOD] >> Transfer_RowTable::MessageField_Partition >> Transfer::PausedAfterError [GOOD] >> Transfer::DescribeTransferWithErrorTopicNotFound >> Transfer::DescribeTransferWithErrorTopicNotFound [GOOD] >> Transfer::CustomConsumer >> Transfer_RowTable::MessageField_Partition [GOOD] >> Transfer_RowTable::MessageField_SeqNo >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> Transfer_ColumnTable::Upsert_DifferentBatch [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch >> Transfer::CustomConsumer [GOOD] >> Transfer::CustomConsumer_NotExists_Remote |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/s3_backups/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/s3_backups/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/s3_backups/tests/py3test >> Transfer_RowTable::MessageField_SeqNo [GOOD] >> Transfer_RowTable::MessageField_ProducerId >> Transfer::CustomConsumer_NotExists_Remote [GOOD] >> Transfer::CustomConsumer_NotExists_Local >> test_workload.py::TestYdbWorkload::test >> Transfer_ColumnTable::Upsert_OneBatch [GOOD] >> Transfer_ColumnTable::ColumnType_Date >> Transfer::CustomConsumer_NotExists_Local [GOOD] >> Transfer::CustomFlushInterval >> Transfer_RowTable::MessageField_ProducerId [GOOD] >> Transfer_RowTable::MessageField_MessageGroupId >> test_workload.py::TestYdbWorkload::test >> Transfer_RowTable::MessageField_MessageGroupId [GOOD] >> Transfer_RowTable::MessageField_WriteTimestamp >> Transfer::CustomFlushInterval [GOOD] >> Transfer::AlterFlushInterval >> Transfer_ColumnTable::ColumnType_Date [GOOD] >> Transfer_ColumnTable::ColumnType_Double >> Transfer_RowTable::MessageField_WriteTimestamp [GOOD] >> Transfer_RowTable::ProcessingJsonMessage |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/testshard_workload/tests/py3test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/testshard_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/testshard_workload/tests/py3test >> test_value_workload.py::TestResultSetValueWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test[row-remote] [GOOD] >> Transfer::AlterFlushInterval [GOOD] >> Transfer::AlterBatchSize >> Transfer_ColumnTable::ColumnType_Double [GOOD] >> Transfer_ColumnTable::ColumnType_Int8 >> Transfer_RowTable::ProcessingJsonMessage [GOOD] >> Transfer_RowTable::ProcessingCDCMessage >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] [GOOD] >> test_workload.py::TestYdbWorkload::test >> test_workload.py::TestYdbWorkload::test[column-local] >> Transfer::AlterBatchSize [GOOD] >> Transfer::CreateTransferSourceNotExists >> Transfer::CreateTransferSourceNotExists [GOOD] >> Transfer::CreateTransferSourceNotExists_LocalTopic >> Transfer::CreateTransferSourceNotExists_LocalTopic [GOOD] >> Transfer::CreateTransferSourceDirNotExists >> Transfer_RowTable::ProcessingCDCMessage [GOOD] >> Transfer_RowTable::ProcessingTargetTable >> Transfer::CreateTransferSourceDirNotExists [GOOD] >> Transfer::CreateTransferSourceDirNotExists_LocalTopic >> Transfer_ColumnTable::ColumnType_Int8 [GOOD] >> Transfer_ColumnTable::ColumnType_Int16 >> Transfer::CreateTransferSourceDirNotExists_LocalTopic [GOOD] >> Transfer::TransferSourceDropped >> test_arrow_workload.py::TestResultSetArrowWorkload::test >> Transfer::TransferSourceDropped [GOOD] >> Transfer::TransferSourceDropped_LocalTopic >> Transfer_RowTable::ProcessingTargetTable [GOOD] >> Transfer_RowTable::ProcessingTargetTableOtherType >> 
test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> Transfer::TransferSourceDropped_LocalTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic >> Transfer_ColumnTable::ColumnType_Int16 [GOOD] >> Transfer_ColumnTable::ColumnType_Int32 >> Transfer::CreateTransferSourceIsNotTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic [GOOD] >> Transfer::CreateTransferTargetIsNotTable >> Transfer::CreateTransferTargetIsNotTable [GOOD] >> Transfer::CreateTransferTargetNotExists >> Transfer::CreateTransferTargetNotExists [GOOD] >> Transfer::PauseAndResumeTransfer >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_RowTable::DropColumn >> Transfer_ColumnTable::ColumnType_Int32 [GOOD] >> Transfer_ColumnTable::ColumnType_Int64 >> Transfer_RowTable::DropColumn [GOOD] >> Transfer_RowTable::TableWithSyncIndex >> Transfer_ColumnTable::ColumnType_Int64 [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/cdc/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/cdc/tests/py3test >> Transfer::PauseAndResumeTransfer [GOOD] >> Transfer::TargetTableWithoutDirectory >> Transfer_RowTable::TableWithSyncIndex [GOOD] >> Transfer_RowTable::TableWithAsyncIndex >> Transfer::TargetTableWithoutDirectory [GOOD] >> Transfer::TargetTableWriteOutsideDirectory >> Transfer_RowTable::TableWithAsyncIndex [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_ColumnTable::MessageField_Attributes >> Transfer::TargetTableWriteOutsideDirectory [GOOD] >> Transfer::TargetTableWriteInsideDirectory >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/row_table/unittest >> Transfer_RowTable::TableWithAsyncIndex [GOOD] Test command err: DDL: CREATE TABLE `Table_4658847444756071035` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4658847444756071035` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_4658847444756071035` FROM `Topic_4658847444756071035` TO `Table_4658847444756071035` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: 
SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=14 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4658847444756071035` ORDER BY `Key`, `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_4658847444756071035`; DDL: DROP TABLE `Table_4658847444756071035` DDL: DROP TOPIC `Topic_4658847444756071035` DDL: CREATE TABLE `Table_6067851372446420726` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6067851372446420726` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_6067851372446420726` FROM `Topic_6067851372446420726` TO `Table_6067851372446420726` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_6067851372446420726` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_6067851372446420726` ORDER BY `Key`, `Message` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_6067851372446420726`; DDL: DROP TABLE `Table_6067851372446420726` DDL: DROP TOPIC `Topic_6067851372446420726` DDL: CREATE TABLE `Table_4487686930982154240` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4487686930982154240` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4487686930982154240` FROM `Topic_4487686930982154240` TO `Table_4487686930982154240` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_4487686930982154240` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_4487686930982154240` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_4487686930982154240` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_4487686930982154240`; DDL: DROP TABLE `Table_4487686930982154240` DDL: DROP TOPIC `Topic_4487686930982154240` DDL: CREATE TABLE `Table_16409214449228128199` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16409214449228128199` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16409214449228128199` FROM `Topic_16409214449228128199` TO `Table_16409214449228128199` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM 
`Table_16409214449228128199` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16409214449228128199` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16409214449228128199` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16409214449228128199`; DDL: DROP TABLE `Table_16409214449228128199` DDL: DROP TOPIC `Topic_16409214449228128199` DDL: CREATE TABLE `Table_3350963903432727557` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3350963903432727557` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_3350963903432727557` FROM `Topic_3350963903432727557` TO `Table_3350963903432727557` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_3350963903432727557`; DDL: DROP TABLE `Table_3350963903432727557` DDL: DROP TOPIC `Topic_3350963903432727557` DDL: CREATE TABLE `Table_4371082878215269440` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4371082878215269440` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_4371082878215269440` FROM `Topic_4371082878215269440` TO `Table_4371082878215269440` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_4371082878215269440`; DDL: DROP TABLE `Table_4371082878215269440` DDL: DROP TOPIC `Topic_4371082878215269440` DDL: CREATE TABLE `Table_4589874627207731977` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4589874627207731977` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4589874627207731977` FROM `Topic_4589874627207731977` TO `Table_4589874627207731977` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_4589874627207731977` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_4589874627207731977` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_4589874627207731977` ORDER BY `Message` Attempt=17 count=1 >>>>> Query: SELECT `Message` FROM `Table_4589874627207731977` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_4589874627207731977`; DDL: DROP TABLE `Table_4589874627207731977` DDL: CREATE TABLE `Table_6802872591478173116` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6802872591478173116` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { retur ... , timestamp) ) WITH ( STORE = ROW ) DDL: ALTER TABLE `SourceTable_16777080375697090911` ADD CHANGEFEED `cdc_16777080375697090911` WITH ( MODE = 'UPDATES', FORMAT = 'JSON' ) DDL: CREATE TABLE `Table_16777080375697090911` ( timestamp Datetime NOT NULL, object_id Utf8 NOT NULL, operation Utf8, PRIMARY KEY (timestamp, object_id) ) WITH ( STORE = ROW ) DDL: $l = ($x) -> { $d = CAST($x._data AS JSON); return [ <| timestamp: Unwrap(DateTime::MakeDatetime(DateTime::ParseIso8601(CAST(Yson::ConvertToString($d.key[1]) AS Utf8)))), object_id: Unwrap(CAST(Yson::ConvertToString($d.key[0]) AS Utf8)), operation: CAST(Yson::ConvertToString($d.update.operation) AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16777080375697090911` FROM `SourceTable_16777080375697090911/cdc_16777080375697090911` TO `Table_16777080375697090911` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: INSERT INTO `SourceTable_16777080375697090911` (`object_id`, `timestamp`, `operation`) VALUES ('id_1', Datetime('2019-01-01T15:30:00Z'), 'value_1'); >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY `operation`, `object_id`, `timestamp` Attempt=19 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY `operation`, `object_id`, `timestamp` Attempt=18 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY `operation`, `object_id`, `timestamp` Attempt=17 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY `operation`, `object_id`, `timestamp` Attempt=16 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY 
`operation`, `object_id`, `timestamp` Attempt=15 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_16777080375697090911` ORDER BY `operation`, `object_id`, `timestamp` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_16777080375697090911`; DDL: DROP TABLE `Table_16777080375697090911` DDL: DROP TABLE `SourceTable_16777080375697090911` DDL: CREATE TABLE `Table_17549745668155687225` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_17549745668155687225_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_17549745668155687225_2` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17549745668155687225` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_17549745668155687225_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, <| __ydb_table: "Table_17549745668155687225_2", Key: $x._offset, Message:CAST($x._data || "_2" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_17549745668155687225` FROM `Topic_17549745668155687225` TO `Table_17549745668155687225` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225` ORDER BY `Key`, `Message` Attempt=16 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225_1` ORDER BY `Key`, `Message` Attempt=19 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17549745668155687225_2` ORDER BY `Key`, `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_17549745668155687225`; DDL: DROP TABLE `Table_17549745668155687225` DDL: DROP TOPIC `Topic_17549745668155687225` DDL: CREATE TABLE `Table_8301593046356226726` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_8301593046356226726_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_8301593046356226726` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_8301593046356226726_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_8301593046356226726` FROM `Topic_8301593046356226726` TO `Table_8301593046356226726` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table '/local/Table_8301593046356226726_1' Only the OLTP table is supported } } >>>>> EXPECTED: Error: Bulk upsert to table '/local/Table_ DDL: DROP TRANSFER `Transfer_8301593046356226726`; DDL: DROP TABLE `Table_8301593046356226726` DDL: DROP TOPIC `Topic_8301593046356226726` DDL: CREATE TABLE `Table_11869979723496284544` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_11869979723496284544` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_11869979723496284544` FROM `Topic_11869979723496284544` TO `Table_11869979723496284544` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_11869979723496284544` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_11869979723496284544` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_11869979723496284544` ORDER BY `Message` Attempt=17 count=1 DDL: ALTER TABLE Table_11869979723496284544 DROP COLUMN Message >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_11869979723496284544' Unknown column: Message } } >>>>> EXPECTED: Unknown column: Message DDL: CREATE TABLE `Table_3938811871759530449` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL SYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3938811871759530449` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_3938811871759530449` FROM `Topic_3938811871759530449` TO `Table_3938811871759530449` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_3938811871759530449' Only async-indexed tables are supported by BulkUpsert } } >>>>> EXPECTED: Only async-indexed tables are supported by BulkUpsert DDL: DROP TRANSFER `Transfer_3938811871759530449`; DDL: DROP TABLE `Table_3938811871759530449` DDL: DROP TOPIC `Topic_3938811871759530449` DDL: CREATE TABLE `Table_16404360848488913411` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL ASYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16404360848488913411` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16404360848488913411` FROM `Topic_16404360848488913411` TO `Table_16404360848488913411` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:22770/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_16404360848488913411` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16404360848488913411` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16404360848488913411` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16404360848488913411`; DDL: DROP TABLE `Table_16404360848488913411` DDL: DROP TOPIC `Topic_16404360848488913411` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/row_table/unittest >> Transfer::TargetTableWriteInsideDirectory [GOOD] >> Transfer::AlterTargetDirectory >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> Transfer_ColumnTable::MessageField_Attributes [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/view/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/view/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/view/tests/py3test >> Transfer::AlterTargetDirectory [GOOD] >> Transfer::WriteToNotExists |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kv/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> S3PathStyleBackup::DisableVirtualAddressing >> Transfer_ColumnTable::MessageField_CreateTimestamp [GOOD] >> Transfer_ColumnTable::MessageField_Partition >> Transfer::WriteToNotExists [GOOD] >> Transfer::WriteToNotTable >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> Transfer::WriteToNotTable [GOOD] >> Transfer::AlterLambdaOnWork |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/s3_path_style/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> Transfer_ColumnTable::MessageField_Partition [GOOD] >> Transfer_ColumnTable::MessageField_SeqNo >> Backup::UuidValue >> Transfer::AlterLambdaOnWork [GOOD] >> Transfer::CreateAndAlterTransferInDirectory >> test_workload.py::TestYdbWorkload::test[column-local] [GOOD] >> Replication::Types >> Transfer::CreateAndAlterTransferInDirectory [GOOD] >> Transfer::Alter_WithSecret >> KqpQuerySession::NoLocalAttach >> 
test_workload.py::TestYdbWorkload::test >> Backup::UuidValue [GOOD] >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] 2026-01-07 18:26:36,870 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-07 18:26:38,670 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 895174 1.1G 1.1G 1.1G ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/ec9d/001e69/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 1020177 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1022015 1.4G 1.4G 1006M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1022190 1.3G 1.4G 972M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1022562 1.3G 1.4G 943M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 525, in test_database_with_column_disk_quotas pool.retry_operation_sync(create_table, None, path) File "contrib/python/ydb/py3/ydb/table.py", line 2655, in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 137, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 98, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/table.py", line 2652, in wrapped_callee with self.checkout(timeout=retry_settings.get_session_client_timeout) as session: File "contrib/python/ydb/py3/ydb/table.py", line 2752, in __enter__ self._acquired = self._pool.acquire(self._blocking, self._timeout) File "contrib/python/ydb/py3/ydb/tracing.py", line 70, in wrapper return f(self, *args, **kwargs) File "contrib/python/ydb/py3/ydb/table.py", line 2687, in acquire return self._pool_impl.acquire(blocking, timeout) File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 348, in acquire session = waiter.result(timeout=timeout) File "contrib/tools/python3/Lib/concurrent/futures/_base.py", line 451, in result self._condition.wait(timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b2f3b2d4640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File 
"ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f3f8d5640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f3d8e8640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f3c5de640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f3d0e7640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f39f81640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f5084b640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f4b988640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", 
line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f4eaa5640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2f56842640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f2f86dd2940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 
...runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ec9d/001e69', '--source-root', '/home/runner/.ya/build/build_root/ec9d/001e69/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/ec9d/001e69', '--source-root', '/home/runner/.ya/build/build_root/ec9d/001e69/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/ec9d/001e69/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2026-01-07 18:27:09,823 WARNING library.python.cores: Core dump dir doesn't exist: 
/coredumps 2026-01-07 18:27:09,824 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/functional/serverless/py3test >> Transfer::Alter_WithSecret [GOOD] >> Transfer::MessageField_Key >> test_workload.py::TestYdbWorkload::test[column-remote] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/data_00.csv.sha256" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/metadata.json.sha256" Found S3 object: "ProducerUuidValueBackup/permissions.pb" Found S3 object: "ProducerUuidValueBackup/permissions.pb.sha256" Found S3 object: "ProducerUuidValueBackup/scheme.pb" Found S3 object: "ProducerUuidValueBackup/scheme.pb.sha256" |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/backup/unittest >> Transfer_ColumnTable::MessageField_SeqNo [GOOD] >> Transfer_ColumnTable::MessageField_ProducerId >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::MessageField_Key [GOOD] >> Transfer::MessageField_Key_Empty >> Replication::PauseAndResumeReplication [GOOD] >> Replication::TopicAutopartitioning >> Transfer::MessageField_Key_Empty [GOOD] >> Transfer::ErrorInMultiLine >> test_workload.py::TestYdbWorkload::test[row] >> Transfer_ColumnTable::MessageField_ProducerId [GOOD] >> Transfer_ColumnTable::MessageField_MessageGroupId >> Transfer::ErrorInMultiLine [GOOD] >> Transfer::ReadFromCDC_Remote >> KqpQuerySession::NoLocalAttach [GOOD] >> Transfer::ReadFromCDC_Remote [GOOD] >> Transfer::ReadFromCDC_Local >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_session/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> test_workload.py::TestDeltaProtocol::test >> Transfer::ReadFromCDC_Local [GOOD] >> Transfer::MessageField_CreateTimestamp_Remote >> Transfer_ColumnTable::MessageField_MessageGroupId [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp >> Transfer::MessageField_CreateTimestamp_Remote [GOOD] >> Transfer::MessageField_CreateTimestamp_Local |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbBackupWorkload::test >> Transfer_ColumnTable::MessageField_WriteTimestamp [GOOD] >> Transfer_ColumnTable::ProcessingJsonMessage >> Transfer::MessageField_CreateTimestamp_Local [GOOD] >> Transfer::MessageField_WriteTimestamp_Remote |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic_kafka/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic_kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic_kafka/tests/py3test >> Transfer::MessageField_WriteTimestamp_Remote [GOOD] >> Transfer::MessageField_WriteTimestamp_Local |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::MessageField_WriteTimestamp_Local [GOOD] >> Transfer::MessageField_Attributes_Remote >> Transfer_ColumnTable::ProcessingJsonMessage [GOOD] >> Transfer_ColumnTable::ProcessingCDCMessage >> Transfer::MessageField_Attributes_Remote [GOOD] >> Transfer::MessageField_Attributes_Local >> Transfer::MessageField_Attributes_Local [GOOD] >> Transfer::MessageField_Partition_Remote >> Transfer::MessageField_Partition_Remote [GOOD] >> Transfer::MessageField_Partition_Local >> Transfer_ColumnTable::ProcessingCDCMessage [GOOD] >> Transfer_ColumnTable::ProcessingTargetTable >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] >> Transfer::MessageField_Partition_Local [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/functional/unittest >> Transfer::MessageField_Partition_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_13959826155455742117` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13959826155455742117` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_13959826155455742117` FROM `Topic_13959826155455742117` TO `Table_13959826155455742117` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_13959826155455742117` ORDER BY `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_13959826155455742117`; DDL: DROP TABLE `Table_13959826155455742117` DDL: DROP TOPIC `Topic_13959826155455742117` DDL: CREATE TABLE `Table_10044219133199560855` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10044219133199560855` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_10044219133199560855` FROM `Topic_10044219133199560855` TO `Table_10044219133199560855` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` 
FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_10044219133199560855` ORDER BY `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_10044219133199560855`; DDL: DROP TABLE `Table_10044219133199560855` DDL: DROP TOPIC `Topic_10044219133199560855` DDL: CREATE TOPIC `Topic_6567846116081321767` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_6567846116081321767` FROM `Topic_6567846116081321767` TO `Table_6567846116081321767` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: [ {
: Error: Executing ESchemeOpCreateTransfer, code: 2003 subissue: {
: Error: Path does not exist, code: 2003 } } {
: Error: Query invalidated on scheme/internal error during Scheme execution, code: 2019 } ] >>>>> EXPECTED: Path does not exist DDL: DROP TOPIC `Topic_6567846116081321767` DDL: CREATE TABLE `Table_11258709932585369362` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_11258709932585369362` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_11258709932585369362 FROM Topic_11258709932585369362 TO Table_11258709932585369362 USING $l WITH ( CONNECTION_STRING = "grp§c://localhost:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_11258709932585369362: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for grp§c://localhost:2135: C-ares status is not ARES_SUCCESS qtype=A name=grp§c://localhost:2135 is_balancer=0: Misformatted domain name } {
: Error: Grpc error response on endpoint grp§c://localhost:2135 } ]) } >>>>> EXPECTED: DNS resolution failed for grp§c://localhost:2135 DDL: DROP TRANSFER `Transfer_11258709932585369362`; DDL: DROP TABLE `Table_11258709932585369362` DDL: DROP TOPIC `Topic_11258709932585369362` DDL: CREATE TABLE `Table_250527265197096273` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_250527265197096273` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_250527265197096273 FROM Topic_250527265197096273 TO Table_250527265197096273 USING $l WITH ( CONNECTION_STRING = "grpc://domain-not-exists-localhost.com.moc:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_250527265197096273: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for domain-not-exists-localhost.com.moc:2135: C-ares status is not ARES_SUCCESS qtype=A name=domain-not-exists-localhost.com.moc is_balancer=0: Domain name not found } {
: Error: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 } ]) } >>>>> EXPECTED: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 DDL: DROP TRANSFER `Transfer_250527265197096273`; DDL: DROP TABLE `Table_250527265197096273` DDL: DROP TOPIC `Topic_250527265197096273` DDL: CREATE USER u8176 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u8176@builtin` DDL: CREATE TABLE `Table_15494503196429751371` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.write', 'ydb.generic.read' ON `/local/Table_15494503196429751371` TO `u8176@builtin` DDL: CREATE TOPIC `Topic_15494503196429751371` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_15494503196429751371` TO `u8176@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15494503196429751371` FROM `Topic_15494503196429751371` TO `Table_15494503196429751371` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); DDL: DROP TOPIC `Topic_15494503196429751371` DDL: DROP TRANSFER `Transfer_15494503196429751371`; DDL: CREATE USER u16260 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u16260@builtin` DDL: CREATE TABLE `Table_308236398761391086` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.read' ON `/local/Table_308236398761391086` TO `u16260@builtin` DDL: CREATE TOPIC `Topic_308236398761391086` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_308236398761391086` TO `u16260@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_308236398761391086` FROM `Topic_308236398761391086` TO `Table_308236398761391086` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Executing ESchemeOpCreateTransfer, code: 2018 subissue: {
: Error: Access denied for scheme request, code: 2018 subissue: {
: Error: Access denied. } } } >>>>> EXPECTED: Access denied for scheme request DDL: DROP TOPIC `Topic_308236398761391086` DDL: CREATE USER u64497 DDL: CREATE TABLE `Table_15381375295137588691` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_15381375295137588691` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT 'ydb.generic.read' ON `/local/Topic_15381375295137588691` TO `u64497@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) ... e_11266626349850158343` DDL: CREATE TABLE `Table_6613317715355517959` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6613317715355517959` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_6613317715355517959` FROM `Topic_6613317715355517959` TO `Table_6613317715355517959` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6613317715355517959` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6613317715355517959` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6613317715355517959` ORDER BY `CreateTimestamp` Attempt=17 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6613317715355517959` ORDER BY `CreateTimestamp` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_6613317715355517959`; DDL: DROP TABLE `Table_6613317715355517959` DDL: DROP TOPIC `Topic_6613317715355517959` DDL: CREATE TABLE `Table_10721894561534752103` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10721894561534752103` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_10721894561534752103` FROM `Topic_10721894561534752103` TO `Table_10721894561534752103` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10721894561534752103` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10721894561534752103` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10721894561534752103` ORDER BY `CreateTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_10721894561534752103`; DDL: DROP TABLE `Table_10721894561534752103` DDL: DROP TOPIC `Topic_10721894561534752103` DDL: CREATE TABLE `Table_16983140097102600904` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16983140097102600904` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_16983140097102600904` FROM `Topic_16983140097102600904` TO `Table_16983140097102600904` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_16983140097102600904` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT 
`WriteTimestamp` FROM `Table_16983140097102600904` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_16983140097102600904` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16983140097102600904`; DDL: DROP TABLE `Table_16983140097102600904` DDL: DROP TOPIC `Topic_16983140097102600904` DDL: CREATE TABLE `Table_7358802786002922948` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_7358802786002922948` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_7358802786002922948` FROM `Topic_7358802786002922948` TO `Table_7358802786002922948` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_7358802786002922948` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_7358802786002922948` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_7358802786002922948` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_7358802786002922948`; DDL: DROP TABLE `Table_7358802786002922948` DDL: DROP TOPIC `Topic_7358802786002922948` DDL: CREATE TABLE `Table_14768906586558218771` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14768906586558218771` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_14768906586558218771` FROM `Topic_14768906586558218771` TO `Table_14768906586558218771` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_14768906586558218771` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_14768906586558218771` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_14768906586558218771` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_14768906586558218771`; DDL: DROP TABLE `Table_14768906586558218771` DDL: DROP TOPIC `Topic_14768906586558218771` DDL: CREATE TABLE `Table_16148039605888462547` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16148039605888462547` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16148039605888462547` FROM `Topic_16148039605888462547` TO `Table_16148039605888462547` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_16148039605888462547` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_16148039605888462547` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_16148039605888462547` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16148039605888462547`; DDL: DROP TABLE `Table_16148039605888462547` DDL: DROP TOPIC `Topic_16148039605888462547` DDL: CREATE TABLE `Table_3107171489919513922` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC 
`Topic_3107171489919513922` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_3107171489919513922` FROM `Topic_3107171489919513922` TO `Table_3107171489919513922` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:19821/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_3107171489919513922` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_3107171489919513922` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_3107171489919513922` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_3107171489919513922`; DDL: DROP TABLE `Table_3107171489919513922` DDL: DROP TOPIC `Topic_3107171489919513922` DDL: CREATE TABLE `Table_14270141145248297154` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14270141145248297154` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_14270141145248297154` FROM `Topic_14270141145248297154` TO `Table_14270141145248297154` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_14270141145248297154` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_14270141145248297154` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_14270141145248297154` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_14270141145248297154`; DDL: DROP TABLE `Table_14270141145248297154` DDL: DROP TOPIC `Topic_14270141145248297154` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/functional/unittest >> Transfer_ColumnTable::ProcessingTargetTable [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/tests/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/mixedpy/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/mixedpy/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_ColumnTable::DropColumn |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/transfer/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> Transfer_ColumnTable::DropColumn [GOOD] >> Transfer_ColumnTable::BigBatchSize_Remote >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {RESULT} 
ydb/tests/functional/kqp/kqp_indexes/unittest >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test[column] >> Transfer_ColumnTable::BigBatchSize_Remote [GOOD] >> Transfer_ColumnTable::BigBatchSize_Local >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/ctas/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/ctas/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/ctas/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/result_set_format/tests/py3test >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/result_set_format/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/result_set_format/tests/py3test >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/column_table/unittest >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_11840203997472525321` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_11840203997472525321` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_11840203997472525321` FROM `Topic_11840203997472525321` TO `Table_11840203997472525321` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_11840203997472525321` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11840203997472525321` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11840203997472525321` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11840203997472525321` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11840203997472525321` ORDER BY `Key`, `Message` Attempt=15 count=1 DDL: DROP TRANSFER `Transfer_11840203997472525321`; DDL: DROP TABLE `Table_11840203997472525321` DDL: DROP TOPIC `Topic_11840203997472525321` DDL: CREATE TABLE `Table_13626560296218758955` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_13626560296218758955` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_13626560296218758955` FROM `Topic_13626560296218758955` TO `Table_13626560296218758955` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_13626560296218758955` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13626560296218758955` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13626560296218758955` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_13626560296218758955`; DDL: DROP TABLE `Table_13626560296218758955` DDL: DROP TOPIC `Topic_13626560296218758955` DDL: 
CREATE TABLE `Table_8107880098600784984` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_8107880098600784984` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8107880098600784984` FROM `Topic_8107880098600784984` TO `Table_8107880098600784984` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_8107880098600784984` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_8107880098600784984` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_8107880098600784984` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_8107880098600784984`; DDL: DROP TABLE `Table_8107880098600784984` DDL: DROP TOPIC `Topic_8107880098600784984` DDL: CREATE TABLE `Table_222438517717892956` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_222438517717892956` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_222438517717892956` FROM `Topic_222438517717892956` TO `Table_222438517717892956` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_222438517717892956` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_222438517717892956` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_222438517717892956` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_222438517717892956`; DDL: DROP TABLE `Table_222438517717892956` DDL: DROP TOPIC `Topic_222438517717892956` DDL: CREATE TABLE `Table_8651287799561290682` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_8651287799561290682` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8651287799561290682` FROM `Topic_8651287799561290682` TO `Table_8651287799561290682` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_8651287799561290682`; DDL: DROP TABLE `Table_8651287799561290682` DDL: DROP TOPIC `Topic_8651287799561290682` DDL: CREATE TABLE `Table_1542090883698149211` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1542090883698149211` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_1542090883698149211` FROM `Topic_1542090883698149211` TO `Table_1542090883698149211` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_1542090883698149211`; DDL: DROP TABLE `Table_1542090883698149211` DDL: DROP TOPIC `Topic_1542090883698149211` DDL: CREATE TABLE `Table_4411995111202434671` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_4411995111202434671` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4411995111202434671` FROM `Topic_4411995111202434671` TO `Table_4411995111202434671` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:65443/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_4411995111202434671` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_4411995111202434671` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_4411995111202434671` ORDER BY `Message` Attempt=17 count=1 >>>>> Query: SELECT `Message` FROM `Table_4411995111202434671` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_4411995111202434671`; DDL: DROP TABLE `Table_4411995111202434671` DDL: CREATE TABLE `Table_16409357900692077361` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_16409357900692077361` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Messa ... ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------' |>; }; return ListMap($lines, $m); }; ; CREATE TRANSFER `Transfer_13310362766906959467` FROM `Topic_13310362766906959467` TO `Table_13310362766906959467` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 1073741824 ); >>>>> Query: SELECT `offset`, `line` FROM `Table_13310362766906959467` ORDER BY `offset`, `line` Attempt=19 count=1802 DDL: DROP TRANSFER `Transfer_13310362766906959467`; DDL: DROP TABLE `Table_13310362766906959467` DDL: DROP TOPIC `Topic_13310362766906959467` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/column_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/column_table/unittest >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/streaming/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbBackupWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/node_broker/tests/py3test >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/streaming/tests/py3test |99.9%| [TM] {RESULT} 
ydb/tests/stress/node_broker/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/node_broker/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/streaming/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/backup/tests/py3test >> test_workload.py::TestYdbBackupWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/backup/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/backup/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/stress/simple_queue/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> Replication::TopicAutopartitioning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::TopicAutopartitioning [GOOD] Test command err: DDL: CREATE TABLE `SourceTable_10644994518423083477` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) ); >>>>> Query: UPSERT INTO `SourceTable_10644994518423083477` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) ); DDL: CREATE ASYNC REPLICATION `Replication_10644994518423083477` FOR `SourceTable_10644994518423083477` AS `Table_10644994518423083477` WITH ( CONNECTION_STRING = 'grpc://localhost:4942/?database=local' ); >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_10644994518423083477` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_10644994518423083477]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_10644994518423083477` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_10644994518423083477]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=18 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_10644994518423083477` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=17 count=1 DDL: DROP ASYNC REPLICATION `Replication_10644994518423083477`; DDL: DROP TABLE `SourceTable_10644994518423083477` DDL: CREATE TABLE `SourceTable_10611671779680147785` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_10611671779680147785` FOR `SourceTable_10611671779680147785` AS `Table_10611671779680147785` WITH ( CONNECTION_STRING = 'grpc://localhost:4942/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_10611671779680147785` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_10611671779680147785` ORDER BY `Message` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_10611671779680147785]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Message` FROM `Table_10611671779680147785` ORDER BY `Message` Attempt=18 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_10611671779680147785` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_10611671779680147785` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_10611671779680147785` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_10611671779680147785` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_10611671779680147785` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_10611671779680147785` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_10611671779680147785` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_10611671779680147785` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_10611671779680147785`; DDL: DROP TABLE `SourceTable_10611671779680147785` DDL: CREATE TABLE `SourceTable_17356922028930930823` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( AUTO_PARTITIONING_BY_SIZE = ENABLED, AUTO_PARTITIONING_PARTITION_SIZE_MB = 1, AUTO_PARTITIONING_BY_LOAD = ENABLED, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 10, UNIFORM_PARTITIONS = 1 ); DDL: CREATE ASYNC REPLICATION `Replication_17356922028930930823` FOR `SourceTable_17356922028930930823` AS `Table_17356922028930930823` WITH ( CONNECTION_STRING = 'grpc://localhost:4942/?database=local' ); Insert much data to trigger split >>>>> Query: UPSERT INTO `SourceTable_17356922028930930823` (Key, Message) VALUES (0, 
'Message-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
...
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'), (4999, 'Message-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); Wait for partition split and data replication Verify data replicated from all partitions >>>>> Query: SELECT `Message` FROM `Table_17356922028930930823` ORDER BY `Message` Attempt=19 count=5000 DDL: DROP ASYNC REPLICATION `Replication_17356922028930930823`; DDL: DROP TABLE `SourceTable_17356922028930930823` |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/replication/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 191 ydb/tests/functional/audit [size:medium] nchunks:500 ------ [test_canonical_records.py 5/100] chunk ran 1 test (total:90.28s - setup:0.01s test:90.18s) [fail] test_canonical_records.py::test_dstool_evict_vdisk_grpc [default-linux-x86_64-release-asan] (84.19s) ydb/tests/functional/audit/test_canonical_records.py:348: in test_dstool_evict_vdisk_grpc execute_dstool_grpc(ydb_cluster, TOKEN, ['vdisk', 'evict', '--vdisk-ids', vdisk_id, '--ignore-degraded-group-check', '--ignore-failure-model-group-check']) ydb/tests/functional/audit/helpers.py:60: in execute_dstool_grpc assert False, f'Command\n{full_cmd}\n finished with exit code {proc_result.exit_code}, stderr:\n\n{proc_result.std_err.decode("utf-8")}\n\nstdout:\n{proc_result.std_out.decode("utf-8")}' E AssertionError: Command E ['/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/dstool/ydb-dstool', '--endpoint', 'grpc://localhost:4714', 'vdisk', 'evict', '--vdisk-ids', '[82000000:1:0:0:0]', '--ignore-degraded-group-check', '--ignore-failure-model-group-check'] E finished with exit code 1, stderr: E E error, add --verbose for more info E E E stdout: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.py.test_dstool_evict_vdisk_grpc.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff ------ FAIL: 33 - GOOD, 1 - FAIL ydb/tests/functional/audit ydb/tests/fq/streaming_optimize [size:medium] nchunks:8 ------ [test_sql_negative.py 0/4] chunk ran 1 test (total:31.09s - recipes:1.58s test:28.49s recipes:0.85s) [fail] test_sql_negative.py::test[watermarks-bad_column-default.txt] [default-linux-x86_64-release-asan] (23.92s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), 
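The audit failure above boils down to the dstool binary exiting non-zero: the helper wraps the call and asserts on the exit code, attaching stderr/stdout. Below is a minimal sketch of that invoke-and-assert pattern, reconstructed only from the traceback above (the function name is illustrative and the real execute_dstool_grpc() in ydb/tests/functional/audit/helpers.py may differ); the same yatest.common.process.execute() wrapper also drives the ydb/tests/fq/streaming_optimize failures reported next.

# Hedged sketch of the helper's invoke-and-assert pattern; reconstructed from
# the traceback above, not copied from helpers.py.
import yatest.common.process

def run_dstool_and_assert(dstool_binary, endpoint, args):
    # Mirrors the command printed in the AssertionError above.
    full_cmd = [dstool_binary, '--endpoint', endpoint] + list(args)
    proc_result = yatest.common.process.execute(full_cmd, check_exit_code=False)
    # A non-zero dstool exit code becomes a test failure with the captured
    # stderr/stdout attached for diagnostics.
    assert proc_result.exit_code == 0, (
        f'Command\n{full_cmd}\n finished with exit code {proc_result.exit_code}, '
        f'stderr:\n\n{proc_result.std_err.decode("utf-8")}\n\n'
        f'stdout:\n{proc_result.std_out.decode("utf-8")}'
    )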
ydb/tests/fq/streaming_optimize [size:medium] nchunks:8
------ [test_sql_negative.py 0/4] chunk ran 1 test (total:31.09s - recipes:1.58s test:28.49s recipes:0.85s)
[fail] test_sql_negative.py::test[watermarks-bad_column-default.txt] [default-linux-x86_64-release-asan] (23.92s)
ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test
    result = fq_run.yql_exec(check_error=False, action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_039z22fu/topic_3.txt' has failed with code 100.
E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b5567afee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b5567afed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b5567c55cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b5567c55cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b5567bd5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b5567bd5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b5567b59b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b5567afe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b5567afe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b5567afe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b5567c557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b5567c555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b556821d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_column-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_negative.py 1/4] chunk ran 1 test (total:30.47s - setup:0.03s recipes:1.55s test:28.22s recipes:0.60s) [fail] test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [default-linux-x86_64-release-asan] (23.68s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ehg5tv7h/topic_3.txt' has failed with code 100. E Errors: E ...bee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b273d8bee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b273d8bed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b273da15cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b273da15cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b273d995977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b273d995977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b273d919b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b273d8be36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b273d8be36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b273d8be272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b273da157c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b273da155fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b273dfdd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459771 byte(s) leaked in 8658 allocation(s). 
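Every chunk in ydb/tests/fq/streaming_optimize fails the same way in this run: fqrun is launched with check_exit_code=False, yet wait() still ends in verify_sanitize_errors() raising ExecutionError, because the command output carries an AddressSanitizer leak summary (roughly 460 KB leaked across ~8,660 allocations, in these reports always inside CPython's import/start-up path), so exit code 100 is reported regardless of the query under test. As an illustration only (an assumed shape, not yatest's actual code), a sanitizer check like the following can fail a run even when the exit code itself is ignored:

# Illustrative sketch only: scan captured output for a LeakSanitizer summary
# and fail the run if one is found, independent of the process exit code.
import re

ASAN_SUMMARY = re.compile(
    r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
)

def check_for_sanitizer_leaks(output_text):
    match = ASAN_SUMMARY.search(output_text)
    if match:
        leaked_bytes, allocations = match.groups()
        # Even when the caller passed check_exit_code=False, a leak summary
        # like the ones above is enough to mark the command as failed.
        raise RuntimeError(
            "sanitizer reported %s byte(s) leaked in %s allocation(s)"
            % (leaked_bytes, allocations)
        )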
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_pushdown-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 0/4] chunk ran 8 tests (total:159.00s - setup:0.03s recipes:0.97s test:157.42s recipes:0.46s) [fail] test_sql_streaming.py::test[hop-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (23.72s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d6mvrfh2/topic_3.txt' has failed with code 100. 
E Errors: E ...cee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b86eabcee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b86eabced08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b86ead25cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b86ead25cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b86eaca5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b86eaca5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b86eac29b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b86eabce36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b86eabce36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b86eabce272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b86ead257c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b86ead255fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b86eb2ed0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459773 byte(s) leaked in 8658 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (19.52s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coo6hj3s/topic_3.txt' has failed with code 100. E Errors: E ...eee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b52842eee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b52842eed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b5284445cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b5284445cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b52843c5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b52843c5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b5284349b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b52842ee36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b52842ee36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b52842ee272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b52844457c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b52844455fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b5284a0d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (17.73s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1eq2z71m/topic_3.txt' has failed with code 100. 
E Errors: E ...cee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bf17d6cee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bf17d6ced08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bf17d825cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bf17d825cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bf17d7a5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bf17d7a5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bf17d729b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bf17d6ce36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bf17d6ce36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bf17d6ce272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bf17d8257c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bf17d8255fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bf17dded0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (18.03s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fi5r40db/topic_3.txt' has failed with code 100. E Errors: E ...9ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b30b909ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b30b909ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b30b91f5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b30b91f5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b30b9175977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b30b9175977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b30b90f9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b30b909e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b30b909e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b30b909e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b30b91f57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b30b91f55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b30b97bd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (17.95s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4dlsd684/topic_3.txt' has failed with code 100. 
E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b9e14ffee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b9e14ffed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b9e15155cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b9e15155cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b9e150d5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b9e150d5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b9e15059b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b9e14ffe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b9e14ffe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b9e14ffe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b9e151557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b9e151555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b9e1571d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (17.23s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o__n_3vk/topic_3.txt' has failed with code 100. E Errors: E ...3ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b748d83ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b748d83ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b748d995cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b748d995cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b748d915977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b748d915977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b748d899b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b748d83e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b748d83e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b748d83e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b748d9957c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b748d9955fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b748df5d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (19.11s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d2101zpp/topic_3.txt' has failed with code 100. 
E Errors: E ...eee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b76da0eee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b76da0eed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b76da245cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b76da245cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b76da1c5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b76da1c5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b76da149b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b76da0ee36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b76da0ee36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b76da0ee272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b76da2457c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b76da2455fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b76da80d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (18.44s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2_813ft6/topic_3.txt' has failed with code 100. E Errors: E ...aee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b84062aee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b84062aed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b8406405cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b8406405cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b8406385977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b8406385977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b8406309b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b84062ae36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b84062ae36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b84062ae272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b84064057c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b84064055fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b84069cd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 8 tests (total:156.84s - recipes:1.54s test:154.64s recipes:0.49s) [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (23.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_deof74mp/topic_3.txt' has failed with code 100. 
E Errors: E ...2ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b4933d2ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b4933d2ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b4933e85cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b4933e85cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b4933e05977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b4933e05977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b4933d89b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b4933d2e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b4933d2e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b4933d2e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b4933e857c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b4933e855fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b493444d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459770 byte(s) leaked in 8658 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (18.33s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qsnlvb5q/topic_3.txt' has failed with code 100. E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b37579fee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b37579fed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b3757b55cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b3757b55cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b3757ad5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b3757ad5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b3757a59b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b37579fe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b37579fe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b37579fe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b3757b557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b3757b555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b375811d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (17.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kpzvplw8/topic_3.txt' has failed with code 100. 
E Errors: E ...bee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b0e94abee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b0e94abed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b0e94c15cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b0e94c15cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b0e94b95977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b0e94b95977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b0e94b19b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b0e94abe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b0e94abe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b0e94abe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b0e94c157c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b0e94c155fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b0e951dd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (17.12s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frzwlbam/topic_3.txt' has failed with code 100. E Errors: E ...aee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b798e8aee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b798e8aed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b798ea05cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b798ea05cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b798e985977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b798e985977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b798e909b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b798e8ae36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b798e8ae36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b798e8ae272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b798ea057c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b798ea055fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b798efcd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (17.36s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_j7l0j4lz/topic_3.txt' has failed with code 100. 
E Errors: E ...eee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7be1337eee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7be1337eed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7be133945cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7be133945cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7be1338c5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7be1338c5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7be133849b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7be1337ee36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7be1337ee36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7be1337ee272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7be1339457c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7be1339455fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7be133f0d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459681 byte(s) leaked in 8656 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (17.43s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7cxni3ye/topic_3.txt' has failed with code 100. E Errors: E ...7ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bd968e7ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bd968e7ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bd968fd5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bd968fd5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bd968f55977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bd968f55977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bd968ed9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bd968e7e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bd968e7e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bd968e7e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bd968fd57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bd968fd55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bd96959d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (18.96s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0yuxz621/topic_3.txt' has failed with code 100. 
E Errors: E ...dee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bbf739dee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bbf739ded08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bbf73b35cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bbf73b35cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bbf73ab5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bbf73ab5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bbf73a39b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bbf739de36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bbf739de36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bbf739de272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bbf73b357c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bbf73b355fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bbf740fd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (18.11s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xvl6ydr7/topic_3.txt' has failed with code 100. E Errors: E ...3ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b5897b3ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b5897b3ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b5897c95cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b5897c95cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b5897c15977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b5897c15977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b5897b99b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b5897b3e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b5897b3e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b5897b3e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b5897c957c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b5897c955fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b589825d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 8 tests (total:154.98s - setup:0.01s recipes:1.24s test:153.06s recipes:0.52s) [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (23.06s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_inh5ri6j/topic_3.txt' has failed with code 100. 
E Errors: E ...6ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b295de6ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b295de6ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b295dfc5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b295dfc5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b295df45977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b295df45977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b295dec9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b295de6e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b295de6e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b295de6e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b295dfc57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b295dfc55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b295e58d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (18.97s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u1wque7i/topic_3.txt' has failed with code 100. E Errors: E ...7ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b57f497ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b57f497ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b57f4ad5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b57f4ad5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b57f4a55977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b57f4a55977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b57f49d9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b57f497e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b57f497e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b57f497e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b57f4ad57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b57f4ad55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b57f509d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (17.35s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_h0ba1vfr/topic_3.txt' has failed with code 100. 
E Errors: E ...2ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bdda202ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bdda202ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bdda2185cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bdda2185cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bdda2105977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bdda2105977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bdda2089b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bdda202e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bdda202e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bdda202e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bdda21857c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bdda21855fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bdda274d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (16.89s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_icchghso/topic_3.txt' has failed with code 100. E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b6f198fee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b6f198fed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b6f19a55cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b6f19a55cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b6f199d5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b6f199d5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b6f19959b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b6f198fe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b6f198fe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b6f198fe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b6f19a557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b6f19a555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b6f1a01d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 454845 byte(s) leaked in 8563 allocation(s). 
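Every [fail] entry in this chunk has the same shape: fqrun exits with code 100, and although the harness launches it with check_exit_code=False, the later verify_sanitize_errors() step still fails the test because an AddressSanitizer leak summary is present in the process output. Below is a minimal, self-contained sketch of that order of checks in plain Python (using subprocess rather than the actual yatest.common.process implementation; the function name and the usage line are illustrative only):

    import re
    import subprocess

    ASAN_SUMMARY = re.compile(r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked")

    def run_and_check_sanitizer(cmd):
        # Run the command without failing on its exit code (the log shows
        # check_exit_code=False), then fail anyway if ASan reported leaks.
        proc = subprocess.run(cmd, capture_output=True, text=True)
        match = ASAN_SUMMARY.search(proc.stderr + proc.stdout)
        if match:
            raise RuntimeError(
                f"command {cmd[0]} leaked {match.group(1)} byte(s) "
                f"(exit code {proc.returncode})"
            )
        return proc

    # Hypothetical usage mirroring the fqrun invocation quoted in the log:
    # run_and_check_sanitizer(["fqrun", "--action=explain", "--cfg=fq_config.conf"])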
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (17.41s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8usjv7_x/topic_3.txt' has failed with code 100. 
E Errors: E ...bee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7ba4fd6bee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7ba4fd6bed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7ba4fd815cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7ba4fd815cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7ba4fd795977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7ba4fd795977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7ba4fd719b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7ba4fd6be36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7ba4fd6be36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7ba4fd6be272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7ba4fd8157c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7ba4fd8155fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7ba4fdddd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (16.99s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ra0s2w3e/topic_3.txt' has failed with code 100. E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bcb502fee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bcb502fed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bcb50455cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bcb50455cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bcb503d5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bcb503d5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bcb50359b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bcb502fe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bcb502fe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bcb502fe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bcb504557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bcb504555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bcb50a1d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (18.20s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_viz0xl61/topic_3.txt' has failed with code 100. 
E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bf6730fee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bf6730fed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bf673255cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bf673255cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bf6731d5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bf6731d5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bf673159b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bf6730fe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bf6730fe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bf6730fe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bf6732557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bf6732555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bf67381d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459085 byte(s) leaked in 8645 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (18.28s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n_f2ti35/topic_3.txt' has failed with code 100. E Errors: E ...8ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b0fb3c8ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b0fb3c8ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b0fb3de5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b0fb3de5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b0fb3d65977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b0fb3d65977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b0fb3ce9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b0fb3c8e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b0fb3c8e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b0fb3c8e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b0fb3de57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b0fb3de55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b0fb43ad0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:137.18s - recipes:1.08s test:135.31s recipes:0.52s) [fail] test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (22.24s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l8wewqz2/topic_3.txt' has failed with code 100. 
E Errors: E ...bee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b299fcbee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b299fcbed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b299fe15cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b299fe15cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b299fd95977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b299fd95977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b299fd19b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b299fcbe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b299fcbe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b299fcbe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b299fe157c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b299fe155fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b29a03dd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (19.22s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9n4wp630/topic_3.txt' has failed with code 100. E Errors: E ...6ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bd4e726ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bd4e726ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bd4e73c5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bd4e73c5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bd4e7345977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bd4e7345977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bd4e72c9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bd4e726e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bd4e726e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bd4e726e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bd4e73c57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bd4e73c55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bd4e798d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (18.14s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_df1coyzv/topic_3.txt' has failed with code 100. 
E Errors: E ...9ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b89cf39ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b89cf39ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b89cf4f5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b89cf4f5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b89cf475977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b89cf475977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b89cf3f9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b89cf39e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b89cf39e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b89cf39e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b89cf4f57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b89cf4f55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b89cfabd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.solomon-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks-default.txt] [default-linux-x86_64-release-asan] (16.98s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_01y3v3yv/topic_3.txt' has failed with code 100. E Errors: E ...fee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b24ea6fee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b24ea6fed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b24ea855cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b24ea855cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b24ea7d5977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b24ea7d5977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b24ea759b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b24ea6fe36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b24ea6fe36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b24ea6fe272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b24ea8557c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b24ea8555fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b24eae1d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [default-linux-x86_64-release-asan] (17.24s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_twe25vy1/topic_3.txt' has failed with code 100. 
E Errors: E ...aee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bcb225aee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bcb225aed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bcb22705cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bcb22705cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bcb22685977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bcb22685977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bcb22609b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bcb225ae36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bcb225ae36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bcb225ae272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bcb227057c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bcb227055fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bcb22ccd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_adjust-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [default-linux-x86_64-release-asan] (16.43s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_lzszc0qo/topic_3.txt' has failed with code 100. E Errors: E ...9ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b5c33b9ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b5c33b9ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b5c33cf5cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b5c33cf5cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b5c33c75977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b5c33c75977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b5c33bf9b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b5c33b9e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b5c33b9e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b5c33b9e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b5c33cf57c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b5c33cf55fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b5c342bd0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459773 byte(s) leaked in 8658 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_as-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [default-linux-x86_64-release-asan] (18.65s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c5doyxu2/topic_3.txt' has failed with code 100. 
E Errors: E ...2ee9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b072fd2ee9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b072fd2ed08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b072fe85cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b072fe85cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b072fe05977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b072fe05977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b072fd89b35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b072fd2e36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b072fd2e36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b072fd2e272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b072fe857c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b072fe855fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b073044d0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_drop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 33 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/dstool [size:medium] ------ sole chunk ran 4 tests (total:221.90s - setup:0.02s test:221.64s canon:0.10s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.3G (14992624K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 732428 54.8M 54.8M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 732630 40.6M 23.3M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 732688 838M 841M 761M └─ ydb-tests-functional-dstool --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do 788435 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806110 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 788859 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 789455 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806177 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 789987 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806124 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 790566 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806125 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 791118 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806114 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 791791 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806172 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 792365 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 806170 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/run_test.log Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/stderr [fail] test_canonical_requests.py::Test::test_essential [default-linux-x86_64-release-asan] (100.95s) ydb/tests/functional/dstool/test_canonical_requests.py:152: in test_essential self._wait_pdisk_metrics_collected() ydb/tests/functional/dstool/test_canonical_requests.py:86: in _wait_pdisk_metrics_collected wait_for_and_assert(all_pdisk_metrics_collected, is_(True), message='All pdisk metrics collected') ydb/tests/library/common/wait_for.py:73: in wait_for_and_assert assert_that(result, matcher, message) E AssertionError: All pdisk metrics collected E Expected: E but: was Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/test_canonical_requests.py.Test.test_essential.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff ------ FAIL: 3 - GOOD, 1 - FAIL ydb/tests/functional/dstool ydb/core/quoter/ut [size:medium] ------ sole chunk ran 38 tests (total:204.50s - setup:0.05s test:204.29s) [fail] QuoterWithKesusTest::AllocationStatistics [default-linux-x86_64-release-asan] (12.87s) assertion failed at ydb/core/quoter/kesus_quoter_ut.cpp:152, auto NKikimr::NTestSuiteQuoterWithKesusTest::TTestCaseAllocationStatistics::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()() const: (allocated > prevValue) { ResourceCounters { ResourcePath: "Resource" } ResourceCounters { ResourcePath: "root" } ResourceCounters { ResourcePath: "root/0" } ResourceCounters { ResourcePath: "root/1" } ResourceCounters { ResourcePath: "root/2" } ResourceCounters { ResourcePath: "root/3" } ResourceCounters { ResourcePath: "root/4" } } TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:13 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.AllocationStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.AllocationStatistics.out ------ FAIL: 37 - GOOD, 1 - FAIL ydb/core/quoter/ut ------ [0/10] chunk ran 2 tests (total:462.86s - test:462.45s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 20.2G (21192564K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 716971 54.8M 54.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 717117 40.8M 23.4M 10.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 717128 1.0G 1.0G 974M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 718987 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 719231 2.1G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 719732 2.1G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 720573 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 721235 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 721937 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 722769 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 723289 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 724045 2.1G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [2/10] chunk ran 2 tests (total:321.56s - test:321.40s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 22.1G (23128768K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 716499 54.9M 54.9M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 716841 40.8M 23.6M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 716876 906M 910M 809M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 718149 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 718533 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 718941 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 719147 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 719533 2.4G 2.4G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 720161 2.3G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 720985 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 825956 2.1G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 721613 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 722346 2.3G 2.3G 1.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [3/10] chunk ran 2 tests (total:172.63s - test:172.26s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 16.1G (16834676K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 659661 54.9M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 659975 40.3M 22.3M 10.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 660021 1.0G 1.0G 964M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 662609 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 662771 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 663190 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 699097 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 663578 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 664014 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 664305 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 664684 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 664956 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 665280 1.7G 1.5G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [4/10] chunk ran 2 tests (total:315.04s - setup:0.18s test:314.13s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.2G (17997992K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 15461 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 15611 39.5M 22.2M 9.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 15657 797M 785M 707M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 17806 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 18893 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 19867 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 20744 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 21308 1.8G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 22370 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 23675 1.8G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 25099 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 25869 1.8G 1.8G 1.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [5/10] chunk ran 8 tests (total:123.16s - test:122.99s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.4G (18222260K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 42758 54.8M 54.4M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 43036 40.2M 23.1M 10.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 43040 802M 779M 701M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 45346 2.0G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67430 2.0G 2.0G 1.4G │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 45502 2.0G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67299 2.0G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 45675 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 46030 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67377 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 46287 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67358 2.1G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 46537 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 46941 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67320 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 47085 2.0G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67321 2.0G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c 47249 2.0G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chun 67367 2.0G 0b 0b └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/c Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ydb/core/fq/libs/row_dispatcher/format_handler/ut [size:medium] ------ sole chunk ran 53 tests (total:329.84s - setup:0.05s test:329.28s) [fail] TestPurecalcFilter::Simple2 [default-linux-x86_64-release-asan] (43.16s) (TWithBackTrace) Event queue is still empty.ydb/library/actors/testlib/test_runtime.cpp:1391: 0. /-S/util/system/backtrace.cpp:284: ?? @ 0xF3EE92B 1. /-S/util/generic/yexception.h:145: TWithBackTrace<> @ 0x2D216250 2. /tmp//-S/ydb/library/actors/testlib/test_runtime.cpp:1391: DispatchEventsInternal @ 0x2D2120D1 3. /tmp//-S/ydb/library/actors/testlib/test_runtime.cpp:1570: WaitForEdgeEvents @ 0x2D218573 4. /-S/ydb/library/actors/testlib/test_runtime.h:506: GrabEdgeEventIf @ 0xEF9D463 5. /-S/ydb/library/actors/testlib/test_runtime.h:555: GrabEdgeEvent @ 0xEF9CFEF 6. /-S/ydb/library/actors/testlib/test_runtime.h:561: GrabEdgeEvent @ 0xEF9CFEF 7. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:218: CompileProgram @ 0xEF8A42C 8. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:157: MakeProgram @ 0xEF8A42C 9. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:374: Execute_ @ 0xEF08AC6 10. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324: operator() @ 0xEF92627 11. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324:1) &> @ 0xEF92627 12. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324:1) &> @ 0xEF92627 13. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0xEF92627 14. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0xEF92627 15. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0xF90F149 16. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0xF90F149 17. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0xF90F149 18. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0xF8E69C7 19. /tmp//-S/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp:324: Execute @ 0xEF9197C 20. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0xF8E817F 21. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0xF908FAC 22. ??:0: ?? @ 0x7F70EB3C4D8F 23. ??:0: ?? @ 0x7F70EB3C4E3F 24. ??:0: ?? 
@ 0xC8A3028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/row_dispatcher/format_handler/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/row_dispatcher/format_handler/ut/test-results/unittest/testing_out_stuff/TestPurecalcFilter.Simple2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/fq/libs/row_dispatcher/format_handler/ut/test-results/unittest/testing_out_stuff/TestPurecalcFilter.Simple2.out ------ FAIL: 52 - GOOD, 1 - FAIL ydb/core/fq/libs/row_dispatcher/format_handler/ut ydb/services/cms/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:11.69s - test:11.66s) [crashed] TGRpcCmsTest::AlterRemoveTest [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/cms/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/cms/ut/test-results/unittest/testing_out_stuff/TGRpcCmsTest.AlterRemoveTest.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/cms/ut/test-results/unittest/testing_out_stuff/TGRpcCmsTest.AlterRemoveTest.out ------ FAIL: 6 - GOOD, 1 - CRASHED ydb/services/cms/ut ydb/core/driver_lib/run/ut nchunks:10 ------ [4/10] chunk ran 0 tests (total:31.17s - test:31.13s) Cannot obtain list of ut tests in the allotted time ('ut --list-verbose' worked longer than 30s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/testing_out_stuff/stderr ------ [6/10] chunk ran 0 tests (total:31.08s - test:31.05s) Cannot obtain list of ut tests in the allotted time ('ut --list-verbose' worked longer than 30s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/driver_lib/run/ut/test-results/unittest/testing_out_stuff/stderr ------ TIMEOUT: 10 - GOOD ydb/core/driver_lib/run/ut ydb/core/viewer/tests [size:medium] ------ sole chunk ran 51 tests (total:91.07s - test:90.18s canon:0.58s) [fail] test.py::TestViewer::test_viewer_tenantinfo [default-linux-x86_64-release-asan] (0.04s) Test results differ from canonical: test_result['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '674', test_result['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '126' test_result['TenantInfo'][1]['Id']: - 6 + 42 test_result['TenantInfo'][2]['Id']: - 8 + 44 test_result['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_tenantinfo_db [default-linux-x86_64-release-asan] (0.18s) Test results differ from canonical: test_result['/Root']['TenantInfo'][0]['GrpcRequestBytes']: extra key with 
value '674', test_result['/Root']['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '126' test_result['/Root/dedicated_db']['TenantInfo'][0]['Id']: - 6 + 42 test_result['/Root/serverless_db']['TenantInfo'][0]['Id']: - 8 + 44 test_result['/Root/serverless_db']['TenantInfo'][0]['ResourceId']: - 7 + 43 test_result['/Root/shared_db']['TenantInfo'][0]['Id']: - 7 + 43 test_result['no-database']['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '674', test_result['no-database']['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '126' test_result['no-database']['TenantInfo'][1]['Id']: - 6 + 42 test_result['no-database']['TenantInfo'][2]['Id']: - 8 + 44 test_result['no-database']['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['no-database']['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo_db.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_scheme_directory [default-linux-x86_64-release-asan] (0.33s) Test results differ from canonical: test_result['1-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['1-get']['self']['owner']: - root + user1 test_result['3-get']['children'][4]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['3-get']['self']['owner']: - root + user1 test_result['5-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['5-get']['self']['owner']: - root + user1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_scheme_directory.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff ------ FAIL: 48 - GOOD, 3 - FAIL ydb/core/viewer/tests ydb/core/persqueue/public/describer/ut ------ sole chunk ran 4 tests (total:79.99s - test:60.10s) Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: TDescriberTests::TopicNotTopic (timeout) duration: 36.51s TDescriberTests::TopicNotExists (good) duration: 19.22s TDescriberTests::TopicExists (good) duration: 11.96s TDescriberTests::CDC test was not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/stderr [timeout] TDescriberTests::TopicNotTopic [default-linux-x86_64-release-asan] (36.51s) Killed by timeout (60 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/TDescriberTests.TopicNotTopic.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/TDescriberTests.TopicNotTopic.out ------ TIMEOUT: 2 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/persqueue/public/describer/ut ------ sole chunk ran 2 tests (total:555.21s - recipes:14.59s test:536.73s recipes:3.67s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.7G (18580312K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 957198 54.9M 54.7M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 957411 40.7M 23.2M 10.5M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 962918 58.7M 58.7M 32.6M │ └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args 963229 2.1G 2.1G 2.1G │ └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/y 958113 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 958242 2.0G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 958571 2.0G 2.0G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959152 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959494 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959675 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959954 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 960322 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 3/10] chunk ran 5 tests (total:647.07s - test:600.07s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 200.85s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (good) duration: 172.71s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 162.95s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (timeout) duration: 84.07s test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (good) duration: 9.46s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (84.07s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_column_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 4/10] chunk ran 4 tests (total:507.62s - test:505.26s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (43.63s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (125.54s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. 
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (110.87s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (215.35s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 9/10] chunk ran 4 tests (total:620.72s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 249.81s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (timeout) duration: 189.15s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 173.39s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (189.15s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ TIMEOUT: 37 - GOOD, 4 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/functional/serverless ydb/tests/olap/column_family/compression [size:medium] nchunks:10 ------ [1/10] chunk ran 3 tests (total:301.45s - test:301.38s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [default-linux-x86_64-release-asan] (75.73s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015608 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_14_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.14.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ [3/10] chunk ran 2 tests (total:269.72s - test:269.64s) [fail] 
alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [default-linux-x86_64-release-asan] (90.76s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015698 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_19_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.19.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ FAIL: 21 - GOOD, 2 - FAIL ydb/tests/olap/column_family/compression ydb/core/tx/columnshard/splitter/ut ------ sole chunk ran 8 tests (total:62.81s - setup:0.02s test:60.04s) Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: Splitter::Crit (timeout) duration: 29.21s Splitter::CritSmallPortions (good) duration: 21.84s Splitter::Simple (good) duration: 2.65s Splitter::BigAndSmall (good) duration: 2.56s Splitter::Trivial (good) duration: 0.93s Splitter::Minimal (good) duration: 0.26s Splitter::Small (good) duration: 0.22s Splitter::CritSimple test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/stderr [timeout] Splitter::Crit [default-linux-x86_64-release-asan] (29.21s) Killed by timeout (60 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/Splitter.Crit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/splitter/ut/test-results/unittest/testing_out_stuff/Splitter.Crit.out ------ TIMEOUT: 6 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/tx/columnshard/splitter/ut ------ sole chunk ran 1 test (total:142.54s - test:142.36s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.8G (16567832K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 734539 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 734674 40.4M 23.0M 10.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 734693 1.1G 1.1G 1016M └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini - 737542 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 737893 1.7G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 738327 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 738812 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 739392 1.7G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 739960 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 740424 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 740829 2.2G 2.1G 1.6G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:78.43s - setup:0.03s recipes:22.46s test:51.81s recipes:4.01s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.4G (18237792K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 987557 54.9M 54.6M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 987707 42.6M 25.1M 12.4M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1001856 145M 386M 318M │ └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/10 988691 2.2G 2.2G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 988725 2.2G 2.2G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 989034 2.2G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 989365 2.2G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 989762 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 990369 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 990749 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 991111 2.1G 2.1G 1.6G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr ydb/core/statistics/service/ut [size:medium] nchunks:60 ------ [15/60] chunk ran 1 test (total:605.46s - test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::CountMinSketchServerlessStatistics (timeout) duration: 603.94s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::CountMinSketchServerlessStatistics [default-linux-x86_64-release-asan] (603.94s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.out ------ [17/60] chunk ran 1 test (total:616.74s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::EqWidthHistogram (timeout) duration: 614.90s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::EqWidthHistogram [default-linux-x86_64-release-asan] (614.90s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.out ------ TIMEOUT: 25 - GOOD, 2 - TIMEOUT ydb/core/statistics/service/ut ydb/core/kqp/ut/arrow [size:medium] nchunks:5 ------ [1/5] chunk ran 13 tests (total:145.23s - test:145.15s) [fail] KqpResultSetFormats::ArrowFormat_Stress [default-linux-x86_64-release-asan] (33.23s) greater-than assertion failed at ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:1472, virtual void NKikimr::NKqp::NTestSuiteKqpResultSetFormats::TTestCaseArrowFormat_Stress::Execute_(NUnitTest::TTestContext &): arrowBatch->num_rows() > 0 Batch must have at least 1 row 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1DF2E6CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1E42B18B 2. /tmp//-S/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:1472: Execute_ @ 0x1DA92A57 3. /tmp//-S/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:276: operator() @ 0x1DADE7A7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:276:1) &> @ 0x1DADE7A7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:276:1) &> @ 0x1DADE7A7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1DADE7A7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1DADE7A7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1E463DF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1E463DF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1E463DF9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1E431E67 12. /tmp//-S/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp:276: Execute @ 0x1DADDAFC 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1E43361F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1E45DC5C 15. ??:0: ?? 
@ 0x7F9081E26D8F 16. ??:0: ?? @ 0x7F9081E26E3F 17. ??:0: ?? @ 0x1B104028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/arrow/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/arrow/test-results/unittest/testing_out_stuff/KqpResultSetFormats.ArrowFormat_Stress.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/arrow/test-results/unittest/testing_out_stuff/KqpResultSetFormats.ArrowFormat_Stress.out ------ FAIL: 62 - GOOD, 1 - FAIL ydb/core/kqp/ut/arrow ydb/services/persqueue_v1/ut/new_schemecache_ut [size:medium] nchunks:10 ------ [6/10] chunk ran 2 tests (total:48.10s - setup:0.02s test:47.99s) [crashed] TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/testing_out_stuff/TPersQueueNewSchemeCacheTest.TestReadAtTimestamp_3.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/testing_out_stuff/TPersQueueNewSchemeCacheTest.TestReadAtTimestamp_3.out ------ FAIL: 19 - GOOD, 1 - CRASHED ydb/services/persqueue_v1/ut/new_schemecache_ut ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [8/10] chunk ran 30 tests (total:202.62s - test:202.55s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [default-linux-x86_64-release-asan] (8.08s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29519 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51.out ------ [9/10] chunk ran 30 tests (total:231.08s - test:230.91s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [default-linux-x86_64-release-asan] (7.62s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:6264 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39.out ------ FAIL: 298 - GOOD, 2 - FAIL ydb/core/tx/tx_proxy/ut_schemereq ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [40/50] chunk ran 4 tests (total:348.04s - test:347.97s) [fail] KqpStats::DeferredEffects+UseSink [default-linux-x86_64-release-asan] (311.92s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:239, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseDeferredEffects::Execute_(NUnitTest::TTestContext &) [UseSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (TIMEOUT != SUCCESS)
: Error: Query did not complete within specified timeout 300000ms, session id ydb://session/3?node_id=2&id=ODYzOGFhMmUtYTJkNWMzZjktODkzN2Q3NzQtOTU0MTFiMDk= , with diff: (TIM|SUCC)E(OUT|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA2FAB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49BE8B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:239: Execute_ @ 0x1BB37E81 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB29307 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB29307 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB29307 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D4AF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D4AF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D4AF9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A2B67 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB285FE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A431F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CE95C 15. ??:0: ?? @ 0x7F43752A3D8F 16. ??:0: ?? @ 0x7F43752A3E3F 17. ??:0: ?? @ 0x18BC5028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.DeferredEffects.UseSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.DeferredEffects.UseSink.out ------ [44/50] chunk ran 4 tests (total:45.00s - test:44.96s) [fail] KqpStats::RequestUnitForExecute [default-linux-x86_64-release-asan] (12.33s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:383, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseRequestUnitForExecute::Execute_(NUnitTest::TTestContext &): (result.GetConsumedRu() > 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA2FAB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49BE8B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:383: Execute_ @ 0x1BB09C60 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB29307 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB29307 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB29307 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D4AF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D4AF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D4AF9 11. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A2B67 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB285FE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A431F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CE95C 15. ??:0: ?? @ 0x7F92CA9DBD8F 16. ??:0: ?? @ 0x7F92CA9DBE3F 17. ??:0: ?? @ 0x18BC5028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.RequestUnitForExecute.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.RequestUnitForExecute.out ------ [45/50] chunk ran 4 tests (total:57.20s - test:57.17s) [fail] KqpStats::StreamLookupStats-StreamLookupJoin [default-linux-x86_64-release-asan] (7.06s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:449, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseStreamLookupStats::Execute_(NUnitTest::TTestContext &) [StreamLookupJoin = false]: (stats.query_phases(1).affected_shards() == 1) failed: (0 != 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA2FAB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49BE8B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:449: Execute_ @ 0x1BB55E9E 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB29307 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB29307 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB29307 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D4AF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D4AF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D4AF9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A2B67 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB285FE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A431F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CE95C 15. ??:0: ?? @ 0x7F555B9A9D8F 16. ??:0: ?? @ 0x7F555B9A9E3F 17. ??:0: ?? @ 0x18BC5028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StreamLookupStats-StreamLookupJoin.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StreamLookupStats-StreamLookupJoin.out [fail] KqpStats::StreamLookupStats+StreamLookupJoin [default-linux-x86_64-release-asan] (24.40s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:445, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseStreamLookupStats::Execute_(NUnitTest::TTestContext &) [StreamLookupJoin = true]: (stats.query_phases(0).affected_shards() == 2) failed: (1 != 2) 0. 
/-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA2FAB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49BE8B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:445: Execute_ @ 0x1BB4FB77 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB29307 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB29307 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB29307 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB29307 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D4AF9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D4AF9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D4AF9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A2B67 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB285FE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A431F 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CE95C 15. ??:0: ?? @ 0x7F555B9A9D8F 16. ??:0: ?? @ 0x7F555B9A9E3F 17. ??:0: ?? @ 0x18BC5028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StreamLookupStats.StreamLookupJoin.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StreamLookupStats.StreamLookupJoin.out [fail] KqpStats::StatsProfile [default-linux-x86_64-release-asan] (7.49s) (yexception) util/generic/hash_table.cpp:50: Key not found in hashtable: ComputeNodes Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StatsProfile.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.StatsProfile.out ------ FAIL: 229 - GOOD, 5 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/sysview [size:medium] nchunks:50 ------ [41/50] chunk ran 1 test (total:32.91s - test:32.87s) [fail] KqpSystemView::QueryStatsScan [default-linux-x86_64-release-asan] (24.49s) assertion failed at ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:956, auto NKikimr::NKqp::NTestSuiteKqpSystemView::TTestCaseQueryStatsScan::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TStringBuf) const: (readBytesSet.contains(192)) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B6C4F0B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BBC19CB 2. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:956: operator() @ 0x1B2CC571 3. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:959: Execute_ @ 0x1B2CC571 4. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: operator() @ 0x1B2E3097 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 6. 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B2E3097 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B2E3097 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BBFA639 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BBFA639 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BBFA639 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BBC86A7 13. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: Execute @ 0x1B2E23EC 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BBC9E5F 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BBF449C 16. ??:0: ?? @ 0x7FCC809F4D8F 17. ??:0: ?? @ 0x7FCC809F4E3F 18. ??:0: ?? @ 0x189BB028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff/KqpSystemView.QueryStatsScan.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff/KqpSystemView.QueryStatsScan.out ------ [42/50] chunk ran 1 test (total:33.16s - setup:0.01s test:33.11s) [fail] KqpSystemView::QueryStatsSimple [default-linux-x86_64-release-asan] (25.34s) assertion failed at ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:904, auto NKikimr::NKqp::NTestSuiteKqpSystemView::TTestCaseQueryStatsSimple::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TStringBuf) const: (readBytesSet.contains(79)) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B6C4F0B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BBC19CB 2. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:904: operator() @ 0x1B2C6347 3. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:908: ?? @ 0x1B2C05B4 4. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: operator() @ 0x1B2E3097 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34:1) &> @ 0x1B2E3097 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B2E3097 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B2E3097 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BBFA639 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BBFA639 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BBFA639 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BBC86A7 13. /tmp//-S/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp:34: Execute @ 0x1B2E23EC 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BBC9E5F 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BBF449C 16. ??:0: ?? @ 0x7F16471F1D8F 17. ??:0: ?? @ 0x7F16471F1E3F 18. ??:0: ?? 
@ 0x189BB028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff/KqpSystemView.QueryStatsSimple.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/sysview/test-results/unittest/testing_out_stuff/KqpSystemView.QueryStatsSimple.out ------ FAIL: 45 - GOOD, 2 - FAIL ydb/core/kqp/ut/sysview ------ [test_discovery.py] chunk ran 3 tests (total:156.61s - test:156.55s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 15.3G (16065144K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 847550 54.8M 53.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 847653 41.0M 23.4M 10.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 847658 833M 836M 758M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 872931 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 873119 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 873443 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 873602 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 873918 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 874072 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 877825 1.5G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 877833 1.5G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 877957 1.4G 1.5G 1.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ------ [1/10] chunk ran 1 test (total:511.85s - test:511.74s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.4G (15126300K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 10748 54.8M 54.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 10881 41.1M 23.7M 10.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 10888 58.7M 58.7M 32.6M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing 11091 9.5G 9.4G 9.4G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr ydb/core/statistics/aggregator/ut [size:medium] nchunks:60 ------ [4/60] chunk ran 1 test (total:605.44s - test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: AnalyzeColumnshard::AnalyzeMultiOperationId (timeout) duration: 603.49s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr [timeout] AnalyzeColumnshard::AnalyzeMultiOperationId [default-linux-x86_64-release-asan] (603.49s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.out ------ TIMEOUT: 31 - GOOD, 1 - TIMEOUT ydb/core/statistics/aggregator/ut ydb/tests/datashard/ttl [size:medium] nchunks:36 ------ [10/36] chunk ran 2 tests (total:391.65s - setup:0.01s test:391.59s) [fail] test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (285.78s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Datetime_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [7/36] chunk ran 2 tests (total:400.26s - test:400.16s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (294.06s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Date_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ FAIL: 46 - GOOD, 2 - FAIL ydb/tests/datashard/ttl ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [28/50] chunk ran 3 tests (total:445.98s - test:445.52s) [crashed] KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [default-linux-x86_64-release-asan] (177.41s) Test crashed (return code: 100) ==424795==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 27200 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce56ab in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce56ab in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce56ab in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce56ab in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce56ab in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021ce56ab in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021ce56ab in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021ce56ab in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021ce4f1d in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr con ..[snippet truncated].. 
r/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #23 0x0000212f40a6 in __call<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #24 0x0000212f40a6 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #25 0x0000212f40a6 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #26 0x00001bbad7be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #27 0x00001bbad7be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #28 0x00001bbad7be in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #29 0x00001bbadd1c in Execute /-S/util/thread/factory.h:15:13 #30 0x00001bbadd1c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #31 0x00001bba1d04 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #32 0x00001b83f8f6 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 429042 byte(s) leaked in 5259 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.TIsolationSettingTest.IsOlap.UsePragma.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.TIsolationSettingTest.IsOlap.UsePragma.out ------ [32/50] chunk ran 3 tests (total:162.92s - test:162.72s) [crashed] KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [default-linux-x86_64-release-asan] (15.17s) Test crashed (return code: 100) ==418141==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 27200 byte(s) in 1 object(s) allocated from: #0 0x00001b87bddd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce56ab in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce56ab in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce56ab in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce56ab in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce56ab in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021ce56ab in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021ce56ab in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021ce56ab in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021ce4f1d in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr con ..[snippet truncated].. 
#17 0x000021507738 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #18 0x000021d503ce in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #19 0x0000225ef43f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #20 0x0000225ef43f in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:212:18 #21 0x00001bbad7be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #22 0x00001bbad7be in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #23 0x00001bbad7be in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #24 0x00001bbadd1c in Execute /-S/util/thread/factory.h:15:13 #25 0x00001bbadd1c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #26 0x00001bba1d04 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #27 0x00001b83f8f6 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 428565 byte(s) leaked in 5232 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TPragmaSettingOltp-IsSnapshotIsolation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TPragmaSettingOltp-IsSnapshotIsolation.out ------ FAIL: 166 - GOOD, 2 - CRASHED ydb/core/kqp/ut/tx ------ [test_disk.py 0/10] chunk ran 1 test (total:246.67s - setup:0.05s test:245.37s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.4G (15104196K) used. 
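The two CRASHED tests in ydb/core/kqp/ut/tx above do not fail on an assertion; the process exits with code 100 because LeakSanitizer reports allocations made under grpc_core::Server::ChannelData::InitTransport that are never freed. If that leak is a known issue in the gRPC layer rather than in the tests themselves, one common workaround is an LSAN suppressions file. This is only a sketch: the file name, the binary name and the decision to suppress instead of fixing the leak are assumptions, not something the report prescribes:

    # lsan.supp: suppress leaks whose allocation stack contains the named frame
    leak:grpc_core::Server::ChannelData::InitTransport

    # run the test binary with the suppressions applied (binary name is hypothetical)
    LSAN_OPTIONS=suppressions=lsan.supp ./ydb-core-kqp-ut-tx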
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 15500 54.9M 54.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 15658 40.7M 23.3M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 15683 749M 745M 672M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 16713 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 17099 1.4G 1.4G 966M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 17617 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 18442 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 19659 1.4G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 20556 1.5G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 21002 1.5G 1.4G 925M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 21436 1.3G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t 22519 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:76.22s - test:76.12s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.7G (16465228K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 675001 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 675021 40.2M 22.4M 10.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 675024 877M 880M 797M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 678242 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 678900 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 679349 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 679792 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 680625 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 681623 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 682515 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 683747 1.8G 1.8G 1.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/statistics [size:medium] nchunks:10 ------ [test_restarts.py 0/10] chunk ran 1 test (total:143.70s - test:143.66s) [fail] test_restarts.py::test_basic [default-linux-x86_64-release-asan] (138.66s) ydb/tests/functional/statistics/test_restarts.py:95: in test_basic assert_that(wait_for(get_base_stats_response, timeout_seconds=5), E AssertionError: base stats available after restart Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff/test_restarts.py.test_basic.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/functional/statistics ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut [size:medium] nchunks:10 ------ [3/10] chunk ran 14 tests (total:267.80s - test:267.58s) [fail] TxUsage::Sinks_Oltp_WriteToTopic_3_Table [default-linux-x86_64-release-asan] (18.69s) assertion failed at 
ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp:1026, void NYdb::NTopic::NTests::NTxUsage::TFixture::TestWriteToTopic26(): (messages.size() == 3) failed: (1 != 3) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NYdb::Dev::NTopic::NTests::NTxUsage::TFixture::TestWriteToTopic26() at /-S/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp:0:5 operator() at /-S/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff/TxUsage.Sinks_Oltp_WriteToTopic_3_Table.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff/TxUsage.Sinks_Oltp_WriteToTopic_3_Table.out ------ FAIL: 135 - GOOD, 1 - FAIL ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut ydb/tests/stress/kafka/tests [size:medium] ------ sole chunk ran 1 test (total:184.50s - test:184.31s) [fail] test_kafka_streams.py::TestYdbTopicWorkload::test [default-linux-x86_64-release-asan] (178.20s) ydb/tests/stress/kafka/tests/test_kafka_streams.py:24: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/kafka_streams_test --endpoint grpc://localhost:30791 --database /Root --bootstrap http://localhost:29041 --source-path test-topic --target-path target-topic --consumer workload-consumer-0 --num-workers 2 --duration 120' has failed with code 255. 
E Errors: E ...dProduceRequest$5(Sender.java:844) E at org.apache.kafka.clients.ClientResponse.onComplete(ClientResponse.java:109) E at org.apache.kafka.clients.NetworkClient.completeResponses(NetworkClient.java:583) E at org.apache.kafka.clients.NetworkClient.poll(NetworkClient.java:575) E at org.apache.kafka.clients.producer.internals.Sender.maybeSendAndPollTransactionalRequest(Sender.java:426) E at org.apache.kafka.clients.producer.internals.Sender.runOnce(Sender.java:316) E at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:243) E at java.base/java.lang.Thread.run(Thread.java:840) E Caused by: org.apache.kafka.common.errors.UnknownServerException: Unexpected termination E Traceback (most recent call last): E File "contrib/tools/python3/Lib/runpy.py", line 198, in _run_module_as_main E return _run_code(code, main_globals, None, E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/tools/python3/Lib/runpy.py", line 88, in _run_code E exec(code, run_globals) E File "ydb/tests/stress/kafka/__main__.py", line 27, in E workload.loop() E File "ydb/tests/stress/kafka/workload/__init__.py", line 125, in loop E assert totalMessCountTest == totalMessCountTarget, f"Source and target {self.target_topic_path}-{i} topics total messages count are not equal:" + \ E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E AssertionError: Source and target target-topic-0 topics total messages count are not equal:{totalMessCountTest} and {totalMessCountTarget} respectively. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff/test_kafka_streams.py.TestYdbTopicWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/kafka/tests ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [36/40] chunk ran 2 tests (total:238.18s - setup:0.24s test:237.30s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (118.18s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 79 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud Total 358 suites: 333 - GOOD 19 - FAIL 6 - TIMEOUT Total 9550 tests: 9467 - GOOD 62 - FAIL 3 - NOT_LAUNCHED 7 - TIMEOUT 7 - SKIPPED 4 - CRASHED Cache efficiency ratio is 89.27% (44803 of 50189). 
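One detail in the kafka workload failure above: the AssertionError prints the literal placeholders {totalMessCountTest} and {totalMessCountTarget} instead of the actual message counts, which suggests that the second of the two concatenated strings in the assert message in ydb/tests/stress/kafka/workload/__init__.py is a plain string rather than an f-string. A small self-contained illustration of that pitfall; the variable names below are invented for the example and only mirror the ones visible in the traceback:

    # Only the first of two concatenated literals carries the f prefix,
    # so the second keeps its braces verbatim, exactly as seen in the log.
    topic = "target-topic-0"
    total_test, total_target = 120, 118
    buggy = f"{topic} totals are not equal: " + "{total_test} and {total_target} respectively."
    fixed = f"{topic} totals are not equal: " + f"{total_test} and {total_target} respectively."
    print(buggy)   # ... {total_test} and {total_target} respectively.
    print(fixed)   # ... 120 and 118 respectively.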
Local: 0 (0.00%), dist: 10194 (20.31%), by dynamic uids: 0 (0.00%), avoided: 34609 (68.96%) Dist cache download: count=4824, size=17.13 GiB, speed=167.61 MiB/s Disk usage for tools/sdk at least 56.35 MiB Additional disk space consumed for build cache 1.05 TiB Critical path: [101256 ms] [CC] [AeQ8c26mDn0m5qaZdZBC_g default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp [started: 0 (1767806520797), finished: 101256 (1767806622053)] [ 128 ms] [AR] [mVef_ZGlHJYmwHt4jKedyA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a [started: 102531 (1767806623328), finished: 102659 (1767806623456)] [ 59853 ms] [LD] [hPN_F4amTt0w9XGDfPh9Eg default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 141668 (1767806662465), finished: 201521 (1767806722318)] [647664 ms] [TM] [test-2704802640317260857 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 3275639 (1767809796436), finished: 3923303 (1767810444100)] [ 43397 ms] [TA] [test-2390139784064646906]: $(BUILD_ROOT)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} [started: 3923408 (1767810444205), finished: 3966805 (1767810487602)] Time from start: 4242978 ms, time elapsed by graph 852298 ms, time diff 3390680 ms. The longest 10 tasks: [647664 ms] [TM] [test-2704802640317260857 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1767809796436, finished: 1767810444100] [621799 ms] [TM] [test-8683822730298976731 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1767809401473, finished: 1767810023272] [617374 ms] [TM] [test-6712517240230715866 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767807317836, finished: 1767807935210] [606103 ms] [TM] [test-825208188338124319 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1767807982663, finished: 1767808588766] [605958 ms] [TM] [test-14450382960741731216 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767808301907, finished: 1767808907865] [592358 ms] [TM] [test-3092507284803660620 asan default-linux-x86_64 release]: ydb/tests/datashard/async_replication/py3test [started: 1767808953586, finished: 1767809545944] [574810 ms] [TM] [test-5960847785802641019 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767808297091, finished: 1767808871901] [574024 ms] [TM] [test-1339913585778578659 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767808301920, finished: 1767808875944] [555854 ms] [TM] [test-2028294075344263782 asan default-linux-x86_64 release]: ydb/tests/functional/kqp/kqp_indexes/unittest [started: 1767810027134, finished: 1767810582988] [541043 ms] [TM] [test-8511463726482247623 asan default-linux-x86_64 release]: ydb/services/keyvalue/ut/unittest [started: 1767806685979, finished: 1767807227022] Total time by type: [162513108 ms] [TM] [count: 4479, ave time 36283.35 msec] [ 15005971 ms] [LD] [count: 453, ave time 33125.76 msec] [ 9429062 ms] [prepare:get from dist cache] [count: 10194, ave time 924.96 msec] [ 1351581 ms] [TS] [count: 181, ave time 7467.30 msec] [ 1002582 ms] [CC] [count: 20, ave time 50129.10 msec] [ 733017 ms] [prepare:put into local cache, clean build dir] [count: 10209, ave time 71.80 msec] [ 693548 ms] [TA] [count: 232, ave time 
2989.43 msec] [ 640132 ms] [prepare:put to dist cache] [count: 5291, ave time 120.99 msec] [ 608019 ms] [prepare:bazel-store] [count: 3, ave time 202673.00 msec] [ 364219 ms] [prepare:tools] [count: 22, ave time 16555.41 msec] [ 160671 ms] [prepare:AC] [count: 4, ave time 40167.75 msec] [ 14004 ms] [AR] [count: 12, ave time 1167.00 msec] [ 629 ms] [UN] [count: 2, ave time 314.50 msec] [ 564 ms] [CF] [count: 2, ave time 282.00 msec] [ 503 ms] [SB] [count: 1, ave time 503.00 msec] [ 456 ms] [CP] [count: 2, ave time 228.00 msec] [ 450 ms] [BI] [count: 1, ave time 450.00 msec] [ 321 ms] [prepare:resources] [count: 1, ave time 321.00 msec] [ 132 ms] [PK] [count: 1, ave time 132.00 msec] [ 35 ms] [prepare:clean] [count: 3, ave time 11.67 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 164558237 ms (91.13%) Total run tasks time - 180583528 ms Configure time - 4.7 s Statistics overhead 2188 ms Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json Ok + echo 0
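The cache summary above can be cross-checked from the printed counts: 10194 results were fetched from the dist cache and 34609 were avoided outright, which together account for the 44803 of 50189 nodes behind the 89.27% efficiency figure. A short self-contained sketch of that arithmetic, using only numbers copied from the log:

    # Reproduce the cache-efficiency percentages reported in the summary
    total_nodes = 50189
    local_hits, dist_hits, avoided = 0, 10194, 34609

    hits = local_hits + dist_hits + avoided                   # 44803
    print(f"efficiency:    {hits / total_nodes:.2%}")         # 89.27%
    print(f"dist share:    {dist_hits / total_nodes:.2%}")    # 20.31%
    print(f"avoided share: {avoided / total_nodes:.2%}")      # 68.96%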